cengal.hardware.memory.shared_memory.versions.v_1.shared_memory
Module Docstring Docstrings: http://www.python.org/dev/peps/pep-0257/
1#!/usr/bin/env python 2# coding=utf-8 3 4# Copyright © 2012-2024 ButenkoMS. All rights reserved. Contacts: <gtalk@butenkoms.space> 5# 6# Licensed under the Apache License, Version 2.0 (the "License"); 7# you may not use this file except in compliance with the License. 8# You may obtain a copy of the License at 9# 10# http://www.apache.org/licenses/LICENSE-2.0 11# 12# Unless required by applicable law or agreed to in writing, software 13# distributed under the License is distributed on an "AS IS" BASIS, 14# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15# See the License for the specific language governing permissions and 16# limitations under the License. 17 18 19# __all__ = ['SharedMemory', 'QueueType', 'Offset', 'Size', 'SharedMemoryError', 20# 'WrongObjectTypeError', 'NoMessagesInQueueError', 21# 'nearest_size', 'nsize', 'TBase', 'IList', 'codec_by_type', 'get_in_line', 'wait_my_turn'] 22 23 24""" 25Module Docstring 26Docstrings: http://www.python.org/dev/peps/pep-0257/ 27""" 28 29__author__ = "ButenkoMS <gtalk@butenkoms.space>" 30__copyright__ = "Copyright © 2012-2024 ButenkoMS. All rights reserved. 
Contacts: <gtalk@butenkoms.space>" 31__credits__ = ["ButenkoMS <gtalk@butenkoms.space>", ] 32__license__ = "Apache License, Version 2.0" 33__version__ = "4.4.1" 34__maintainer__ = "ButenkoMS <gtalk@butenkoms.space>" 35__email__ = "gtalk@butenkoms.space" 36# __status__ = "Prototype" 37__status__ = "Development" 38# __status__ = "Production" 39 40 41from cengal.introspection.inspect import is_callable, is_descriptor, is_async 42from cengal.math.numbers import RationalNumber 43from cengal.hardware.memory.barriers import full_memory_barrier, mm_pause 44from cengal.time_management.cpu_clock import cpu_clock 45from cengal.time_management.high_precision_sync_sleep import hps_sleep 46from cengal.time_management.sleep_tools import sleep 47from cengal.introspection.inspect import pdi, pifrl, intro_func_repr_limited 48from cengal.system import OS_TYPE 49from cengal.file_system.file_manager import file_exists 50from cengal.data_manipulation.conversion.binary import bint_to_bytes, bytes_to_bint 51from cengal.introspection.inspect import is_setable_data_descriptor 52# from .compilable import write_uint64 as write_uint64_c, read_uint64 as read_uint64_c, write_int64, read_int64, write_double, read_double, zero_memory 53from .compilable import write_uint64, read_uint64, read_uint8, write_int64, read_int64, write_double, read_double, \ 54 zero_memory, list__get_item, list__get_item_as_offset, list__set_item, list__set_item_as_offset, mask_least_significant_bits 55 56import os 57import asyncio 58import pickle 59import ctypes 60import numpy as np 61from datetime import datetime, timedelta, timezone, date, time 62from decimal import Decimal 63from enum import IntEnum 64from multiprocessing.shared_memory import SharedMemory as MultiprocessingSharedMemory 65from array import array 66from inspect import isclass, ismodule, getattr_static 67from contextlib import contextmanager 68from pathlib import PurePath 69from math import log2, ceil 70from pickle import dumps as pickle_dumps, loads as 
pickle_loads 71from inspect import isfunction, ismethod, isclass, ismethoddescriptor 72from collections.abc import Sequence as AbsSequence, MutableSequence as AbsMutableSequence, Set as AbsSet, \ 73 MutableSet as AbsMutableSet, Mapping as AbsMapping, MutableMapping as AbsMutableMapping 74try: 75 from torch import Tensor, from_numpy 76except ImportError: 77 class Tensor: 78 def numpy(self) -> np.ndarray: 79 raise NotImplementedError 80 81 def from_numpy(numpy_ndarray: np.ndarray) -> Tensor: 82 raise NotImplementedError 83 84from types import FrameType, CodeType 85from typing import Any, Tuple, Optional, List, Dict, Set, FrozenSet, AbstractSet, Type, Union, Sequence, cast, Hashable, Coroutine 86 87 88DEBUG = False 89 90 91current_shared_memory_instance: 'SharedMemory' = None 92 93 94# def write_uint64(base_address: int, offset: int, value: int): 95# if current_shared_memory_instance is not None: 96# if 460 <= offset <= 564: 97# print('write_uint64: offset_to_be_monitored: offset: {}, value: {}'.format(offset, value)) 98 99# write_uint64_cython(base_address, offset, value) 100 101 102# def write_uint64(base_address: int, offset: int, value: int): 103# if current_shared_memory_instance is None: 104# return write_uint64_c(base_address, offset, value) 105# else: 106# return current_shared_memory_instance.write_uint64(offset, value) 107 108# def read_uint64(base_address: int, offset: int) -> int: 109# if current_shared_memory_instance is None: 110# return read_uint64_c(base_address, offset) 111# else: 112# return current_shared_memory_instance.read_uint64(offset) 113 114 115class QueueType(IntEnum): 116 fifo = 0 117 lifo = 1 118 119 120class ObjectType(IntEnum): 121 tfree_memory = 0 122 tmessage = 1 123 tnone = 2 124 tbool = 3 125 tint = 4 126 tfloat = 5 127 tcomplex = 6 128 tstr = 7 129 tbytes = 8 130 tbytearray = 9 131 ttuple = 10 132 tlist = 11 133 tmutableset = 12 134 tset = 13 135 tmutablemapping = 14 136 tmapping = 15 137 tfastdict = 16 138 tclass = 17 139 tpickable 
= 18 140 tinternal_list = 19 141 tsmallint = 20 142 tbigint = 21 143 tgeneralobject = 22 144 tnumpyndarray = 23 145 ttorchtensor = 24 146 tstaticobject = 25 147 tfastset = 26 148 tslice = 27 149 tdecimal = 28 150 tdatetime = 29 151 tstaticobjectwithslots = 30 152 153 154class SysValuesOffsets(IntEnum): 155 total_mem_size = 0 156 data_start_offset = 1 157 data_size = 2 158 data_end_offset = 3 159 free_memory_search_start = 4 160 first_message_offset = 5 161 last_message_offset = 6 162 creator_in_charge = 7 163 consumer_in_charge = 8 164 creator_wants_to_be_in_charge = 9 165 consumer_wants_to_be_in_charge = 10 166 creator_ready = 11 167 consumer_ready = 12 168 169 170Offset = int 171Size = int 172minimal_memory_block_size = 8 173block_size = minimal_memory_block_size 174bs = block_size 175 176 177class SharedMemoryError(Exception): 178 pass 179 180 181class OperationTimedOutError(SharedMemoryError): 182 pass 183 184 185class FreeMemoryChunkNotFoundError(SharedMemoryError): 186 """Indicates that an unpartitioned chunk of free memory of requested size not being found. 187 188 Regarding this error, it’s important to adjust the size parameter in the SharedMemory configuration. Trying to estimate memory consumption down to the byte is not practical because it fails to account for the memory overhead required by each entity stored (such as entity type metadata, pointers to child entities, etc.). 189 190 When setting the size parameter for SharedMemory, consider using broader units like tens (for embedded systems), hundreds, or thousands of megabytes, rather than precise byte counts. This approach is similar to how you would not precisely calculate the amount of memory needed for a web server hosted externally; you make an educated guess, like assuming that 256 MB might be insufficient but 768 MB could be adequate, and then adjust based on practical testing. 191 192 Also, be aware of memory fragmentation, which affects all memory allocation systems, including the OS itself. 
For example, if you have a SharedMemory pool sized to store exactly ten 64-bit integers, accounting for additional bytes for system information, your total might be around 200 bytes. Initially, after storing the integers, your memory might appear as ["int", "int", ..., "int"]. If you delete every second integer, the largest contiguous free memory chunk could be just 10 bytes, despite having 50 bytes free in total. This fragmentation means you cannot store a larger data structure like a 20-byte string which needs contiguous space. 193 194 To resolve this, simply increase the size parameter value of SharedMemory. This is akin to how you would manage memory allocation for server hosting or thread stack sizes in software development. 195 196 Args: 197 SharedMemoryError (_type_): _description_ 198 """ 199 pass 200 201 202class ObjBufferIsSmallerThanRequestedNumpyArrayError(SharedMemoryError): 203 pass 204 205 206class WrongObjectTypeError(SharedMemoryError): 207 pass 208 209 210class NoMessagesInQueueError(SharedMemoryError): 211 pass 212 213 214def nearest_size(size: Size) -> Size: 215 return ((size // bs) * bs + bs) if size % bs else size 216 217 218nsize = nearest_size 219 220 221class BaseIObject: 222 pass 223 224 225# TODO: add next fields: obj_id (simple int index; need to identify object in shared memory); ref_count (simple int counter; need to count references to object. Howerver this field can be moved to shared memory dict with all objects properties like ref_count, etc.) 
226class BaseObjOffsets(IntEnum): 227 obj_type = 0 228 obj_size = 1 229 230 231BaseObjOffsetsLen: int = len(BaseObjOffsets) # 2 232bsBaseObjOffsetsLen: int = bs * len(BaseObjOffsets) # 8 * 2 = 16 233 234 235class TBase: 236 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]: 237 raise NotImplementedError 238 239 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any: 240 raise NotImplementedError 241 242 def destroy(self, shared_memory: 'SharedMemory', offset: Offset): 243 raise NotImplementedError 244 245 def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview: 246 raise NotImplementedError 247 248 def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]: 249 raise NotImplementedError 250 251 252# ====================================================================================================================== 253# === None ===================================================================================================== 254 255 256class TNone: 257 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: None) -> Tuple[None, Offset, Size]: 258 offset, real_size = shared_memory.malloc(ObjectType.tnone, 0) 259 return obj, offset, real_size 260 261 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 262 if ObjectType.tnone != read_uint64(shared_memory.base_address, offset): 263 raise WrongObjectTypeError 264 265 return None 266 267 def destroy(self, shared_memory: 'SharedMemory', offset: Offset): 268 if ObjectType.tnone != read_uint64(shared_memory.base_address, offset): 269 raise WrongObjectTypeError 270 271 shared_memory.free(offset) 272 273 274# ====================================================================================================================== 275# === Int ===================================================================================================== 276 
277 278class IntOffsets(IntEnum): 279 data = 0 280 281 282class TInt: 283 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: int) -> Tuple[int, Offset, Size]: 284 offset, real_size = shared_memory.malloc(ObjectType.tint, bs * len(IntOffsets)) 285 write_int64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * IntOffsets.data, obj) 286 return obj, offset, real_size 287 288 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> int: 289 if ObjectType.tint != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 290 raise WrongObjectTypeError 291 292 return read_int64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * IntOffsets.data) 293 294 def destroy(self, shared_memory: 'SharedMemory', offset: Offset): 295 if ObjectType.tint != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 296 raise WrongObjectTypeError 297 298 shared_memory.free(offset) 299 300 301# ====================================================================================================================== 302# === SmallInt ===================================================================================================== 303 304 305class SmallInt(int): 306 ... 
307 308 309smallint = SmallInt 310sint = SmallInt 311 312 313class SmallIntOffsets(IntEnum): 314 data = 0 315 316 317class TSmallInt: 318 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: int) -> Tuple[int, Offset, Size]: 319 offset, real_size = shared_memory.malloc(ObjectType.tsmallint, bs * len(SmallIntOffsets)) 320 write_int64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * SmallIntOffsets.data, obj) 321 return obj, offset, real_size 322 323 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> int: 324 if ObjectType.tsmallint != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 325 raise WrongObjectTypeError 326 327 return read_int64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * SmallIntOffsets.data) 328 329 def destroy(self, shared_memory: 'SharedMemory', offset: Offset): 330 if ObjectType.tsmallint != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 331 raise WrongObjectTypeError 332 333 shared_memory.free(offset) 334 335 336# ====================================================================================================================== 337# === LargeInt ===================================================================================================== 338 339 340class BigInt(int): 341 ... 
# Short aliases for the BigInt marker type.
bigint = BigInt
bint = BigInt


class BigIntOffsets(IntEnum):
    """Payload slot layout for a big int: a length slot followed by raw bytes."""
    data_size = 0
    data = 1


class TBigInt:
    """Codec for arbitrary-precision ints, stored as length-prefixed bytes."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: int) -> Tuple[int, Offset, Size]:
        payload = bint_to_bytes(obj)
        payload_len = len(payload)
        offset, real_size = shared_memory.malloc(ObjectType.tbigint, bs * len(BigIntOffsets) + payload_len)
        header_end = offset + bs * len(BaseObjOffsets)
        write_uint64(shared_memory.base_address, header_end + bs * BigIntOffsets.data_size, payload_len)
        data_offset = header_end + bs * BigIntOffsets.data
        shared_memory._shared_memory.buf[data_offset:data_offset + payload_len] = payload
        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> int:
        base = shared_memory.base_address
        if read_uint64(base, offset + bs * BaseObjOffsets.obj_type) != ObjectType.tbigint:
            raise WrongObjectTypeError

        header_end = offset + bs * len(BaseObjOffsets)
        payload_len = read_uint64(base, header_end + bs * BigIntOffsets.data_size)
        if not payload_len:
            # Zero-length payload encodes the integer 0.
            return 0

        data_offset = header_end + bs * BigIntOffsets.data
        payload = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + payload_len])
        return bytes_to_bint(payload)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        base = shared_memory.base_address
        if read_uint64(base, offset + bs * BaseObjOffsets.obj_type) != ObjectType.tbigint:
            raise WrongObjectTypeError

        shared_memory.free(offset)

    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
        """Zero-copy view over the raw big-int bytes."""
        base = shared_memory.base_address
        if read_uint64(base, offset + bs * BaseObjOffsets.obj_type) != ObjectType.tbigint:
            raise WrongObjectTypeError

        header_end = offset + bs * len(BaseObjOffsets)
        payload_len = read_uint64(base, header_end + bs * BigIntOffsets.data_size)
        data_offset = header_end + bs * BigIntOffsets.data
        return shared_memory._shared_memory.buf[data_offset:data_offset + payload_len]

    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
        """Return (payload offset, payload size) for the raw big-int bytes."""
        base = shared_memory.base_address
        if read_uint64(base, offset + bs * BaseObjOffsets.obj_type) != ObjectType.tbigint:
            raise WrongObjectTypeError

        header_end = offset + bs * len(BaseObjOffsets)
        payload_len = read_uint64(base, header_end + bs * BigIntOffsets.data_size)
        return header_end + bs * BigIntOffsets.data, payload_len


# ======================================================================================================================
# === Bool =============================================================================================================


class BoolOffsets(IntEnum):
    """Payload slot layout for a bool (one 64-bit slot)."""
    data = 0


class TBool:
    """Codec storing a bool as an unsigned 64-bit 0/1 value."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bool) -> Tuple[bool, Offset, Size]:
        offset, real_size = shared_memory.malloc(ObjectType.tbool, bs * len(BoolOffsets))
        slot = offset + bs * len(BaseObjOffsets) + bs * BoolOffsets.data
        write_uint64(shared_memory.base_address, slot, int(obj))
        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bool:
        base = shared_memory.base_address
        if read_uint64(base, offset + bs * BaseObjOffsets.obj_type) != ObjectType.tbool:
            raise WrongObjectTypeError

        return bool(read_uint64(base, offset + bs * len(BaseObjOffsets) + bs * BoolOffsets.data))

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        base = shared_memory.base_address
        if read_uint64(base, offset + bs * BaseObjOffsets.obj_type) != ObjectType.tbool:
            raise WrongObjectTypeError

        shared_memory.free(offset)
# ======================================================================================================================
# === Float ============================================================================================================


class FloatOffsets(IntEnum):
    """Payload slot layout for a float (one 64-bit slot)."""
    data = 0


class TFloat:
    """Codec storing a Python float as a 64-bit double after the system header."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: float) -> Tuple[float, Offset, Size]:
        offset, real_size = shared_memory.malloc(ObjectType.tfloat, bs * len(FloatOffsets))
        write_double(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * FloatOffsets.data, obj)
        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> float:
        # Consistency fix: address the obj_type slot explicitly like the other
        # codecs (BaseObjOffsets.obj_type == 0, so the address is unchanged).
        if ObjectType.tfloat != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        return read_double(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * FloatOffsets.data)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        if ObjectType.tfloat != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        shared_memory.free(offset)


# ======================================================================================================================
# === Bytes ============================================================================================================


class BytesOffsets(IntEnum):
    """Payload slot layout for bytes: a length slot followed by raw data."""
    data_size = 0
    data = 1


class TBytes:
    """Codec storing a bytes object as a length-prefixed byte run."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bytes) -> Tuple[bytes, Offset, Size]:
        data_size = len(obj)
        offset, real_size = shared_memory.malloc(ObjectType.tbytes, bs * len(BytesOffsets) + data_size)
        write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * BytesOffsets.data_size, data_size)
        data_offset = offset + bs * len(BaseObjOffsets) + bs * BytesOffsets.data
        shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = obj
        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bytes:
        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * BytesOffsets.data_size)
        if data_size:
            data_offset = offset + bs * len(BaseObjOffsets) + bs * BytesOffsets.data
            obj = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
            return obj
        else:
            # Zero-length payload encodes b''.
            return bytes()

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        shared_memory.free(offset)

    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
        """Zero-copy view over the stored bytes."""
        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * BytesOffsets.data_size)
        data_offset = offset + bs * len(BaseObjOffsets) + bs * BytesOffsets.data
        return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]

    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
        """Return (payload offset, payload size) for the stored bytes."""
        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * BytesOffsets.data_size)
        data_offset = offset + bs * len(BaseObjOffsets) + bs * BytesOffsets.data
        return data_offset, data_size


# NOTE: an old commented-out debug variant of TBytes (with print_mem tracing)
# was removed here as dead code.
# ======================================================================================================================
# === Bytearray ========================================================================================================


class BytearrayOffsets(IntEnum):
    """Payload slot layout for a bytearray: a length slot followed by raw data."""
    data_size = 0
    data = 1


class TBytearray:
    """Codec storing a bytearray as a length-prefixed byte run (round-trips as a new bytearray)."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bytearray) -> Tuple[bytearray, Offset, Size]:
        data = bytes(obj)
        data_size = len(data)
        offset, real_size = shared_memory.malloc(ObjectType.tbytearray, bs * len(BytearrayOffsets) + data_size)
        write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * BytearrayOffsets.data_size, data_size)
        data_offset = offset + bs * len(BaseObjOffsets) + bs * BytearrayOffsets.data
        shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = data
        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bytearray:
        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * BytearrayOffsets.data_size)
        if data_size:
            data_offset = offset + bs * len(BaseObjOffsets) + bs * BytearrayOffsets.data
            data = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
            return bytearray(data)
        else:
            return bytearray(bytes())

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        shared_memory.free(offset)

    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
        """Zero-copy view over the stored bytearray data."""
        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * BytearrayOffsets.data_size)
        data_offset = offset + bs * len(BaseObjOffsets) + bs * BytearrayOffsets.data
        return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]

    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
        """Return (payload offset, payload size) for the stored bytearray data."""
        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * BytearrayOffsets.data_size)
        data_offset = offset + bs * len(BaseObjOffsets) + bs * BytearrayOffsets.data
        return data_offset, data_size


# ======================================================================================================================
# === Str ==============================================================================================================


class StrOffsets(IntEnum):
    """Payload slot layout for a str: a length slot followed by UTF-8 data."""
    data_size = 0
    data = 1


class TStr:
    """Codec storing a str as length-prefixed UTF-8 bytes."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: str) -> Tuple[str, Offset, Size]:
        data = str.encode(obj)
        data_size = len(data)
        offset, real_size = shared_memory.malloc(ObjectType.tstr, bs * len(StrOffsets) + data_size)
        write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StrOffsets.data_size, data_size)
        data_offset = offset + bs * len(BaseObjOffsets) + bs * StrOffsets.data
        shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = data
        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> str:
        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StrOffsets.data_size)
        if data_size:
            data_offset = offset + bs * len(BaseObjOffsets) + bs * StrOffsets.data
            data = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
            return data.decode()
        else:
            return str()

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        shared_memory.free(offset)

    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
        """Zero-copy view over the stored UTF-8 bytes."""
        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StrOffsets.data_size)
        data_offset = offset + bs * len(BaseObjOffsets) + bs * StrOffsets.data
        return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]

    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
        """Return (payload offset, payload size) for the stored UTF-8 bytes."""
        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StrOffsets.data_size)
        data_offset = offset + bs * len(BaseObjOffsets) + bs * StrOffsets.data
        return data_offset, data_size


# ======================================================================================================================
# === ListTrue =========================================================================================================
# An old preoptimized version with a bunch of issues and bugs due to the wrong offsets. Deprecated. Use IList instead


class InternalListTrueOffsets(IntEnum):
    """System slots of the internal list payload: capacity then current size."""
    capacity = 0
    size = 1


def malloc_tinternal_list_true(shared_memory: 'SharedMemory', size: Size, capacity: Size = None) -> Tuple[Offset, Size]:
    """Allocate an internal list with ``size`` used slots. Deprecated (see note above).

    When ``capacity`` is None it defaults to ``size * 2`` (or 16 for an empty list).
    """
    capacity = (size << 1 if size else 16) if capacity is None else capacity
    datas_sys_part_size = 8 * len(InternalListTrueOffsets)
    offset, real_size = shared_memory.malloc(ObjectType.tinternal_list, datas_sys_part_size + 8 * capacity)
    # capacity/size land at offset+16 and offset+24, which is where
    # IListTrue reads them back (8 * len(BaseObjOffsets) == datas_sys_part_size).
    data_offset = offset + datas_sys_part_size
    write_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.capacity, capacity)
    write_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.size, size)
    return offset, real_size


def realloc_tinternal_list_true(shared_memory: 'SharedMemory', offset: Offset, desired_size: int = None, new_capacity: int = None, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Offset, Size]:
    """Reallocate an internal list to a new capacity. Deprecated (see note above).

    Capacity policy: explicit ``new_capacity`` wins; otherwise double
    ``desired_size`` (or the current capacity), falling back to 16 for zero;
    never shrinks below the current element count.
    """
    datas_sys_part_size = 8 * len(InternalListTrueOffsets)
    data_offset = offset + datas_sys_part_size
    capacity = read_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.capacity)
    size = read_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.size)
    # Fix: removed a redundant pre-computation of new_list_capacity that was
    # unconditionally overwritten by the branch below.
    if new_capacity is None:
        if desired_size is None:
            new_list_capacity = capacity << 1 if capacity else 16
        else:
            new_list_capacity = desired_size << 1 if desired_size else 16
    else:
        new_list_capacity = new_capacity

    if new_list_capacity < size:
        new_list_capacity = size

    new_offset, new_real_size = shared_memory.realloc(offset, datas_sys_part_size + 8 * new_list_capacity, loop_allowed, zero_mem)
    data_offset = new_offset + datas_sys_part_size
    write_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.capacity, new_list_capacity)
    return new_offset, new_real_size
new_list_capacity = desired_size << 1 if desired_size else 16 692 else: 693 new_list_capacity = new_capacity 694 695 if new_list_capacity < size: 696 new_list_capacity = size 697 698 new_offset, new_real_size = shared_memory.realloc(offset, datas_sys_part_size + 8 * new_list_capacity, loop_allowed, zero_mem) 699 data_offset = new_offset + datas_sys_part_size 700 write_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.capacity, new_list_capacity) 701 return new_offset, new_real_size 702 703 704class IListTrue(BaseIObject, list): 705 def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: List = None) -> None: 706 self._shared_memory = shared_memory 707 self._base_address = shared_memory.base_address 708 if offset is None: 709 offset, real_size = shared_memory.malloc(ObjectType.tlist, 8) 710 self._offset = offset 711 self._offset__data = offset + 8 * len(BaseObjOffsets) 712 self._offset__pointer_to_internal_list = self._offset__data 713 714 if obj is None: 715 obj = list() 716 717 data_len = len(obj) 718 capacity_len = data_len << 1 if data_len else 16 719 internal_list_offset, data_tuple_real_size = malloc_tinternal_list(shared_memory, data_len, capacity_len) 720 self._pointer_to_internal_list = internal_list_offset 721 for i, item in enumerate(obj): 722 item_mapped_obj, item_offset, item_size = shared_memory.put_obj(item) 723 write_uint64(self._base_address, self._item_offset(i), item_offset) 724 else: 725 self._offset = offset 726 self._offset__data = offset + 8 * len(BaseObjOffsets) 727 self._offset__pointer_to_internal_list = self._offset__data 728 729 def raw_to_bytes(self, bytes_num: int) -> bytes: 730 start_index = self._pointer_to_internal_list 731 return self._shared_memory.read_mem(start_index, bytes_num) 732 # return bytes(self._shared_memory._shared_memory.buf[start_index : start_index + bytes_num]) 733 734 @property 735 def _obj_size(self): 736 return read_uint64(self._base_address, self._offset + 8 * 
BaseObjOffsets.obj_size) 737 738 @property 739 def _pointer_to_internal_list(self): 740 return read_uint64(self._base_address, self._offset__pointer_to_internal_list) 741 742 @_pointer_to_internal_list.setter 743 def _pointer_to_internal_list(self, value: Offset): 744 write_uint64(self._base_address, self._offset__pointer_to_internal_list, value) 745 746 @property 747 def _list_len(self): 748 return read_uint64(self._base_address, self._pointer_to_internal_list + 8 * len(BaseObjOffsets) + 8 * InternalListTrueOffsets.size) 749 750 @_list_len.setter 751 def _list_len(self, value: int): 752 write_uint64(self._base_address, self._pointer_to_internal_list + 8 * len(BaseObjOffsets) + 8 * InternalListTrueOffsets.size, value) 753 754 @property 755 def _list_capacity(self): 756 return read_uint64(self._base_address, self._pointer_to_internal_list + 8 * len(BaseObjOffsets) + 8 * InternalListTrueOffsets.capacity) 757 758 def _item_offset(self, key: int) -> Offset: 759 return self._pointer_to_internal_list + 8 * len(BaseObjOffsets) + 8 * len(InternalListTrueOffsets) + key * 8 760 761 def __len__(self) -> int: 762 return self._list_len 763 764 def get_children_offsets(self) -> List[Offset]: 765 return [read_uint64(self._base_address, self._item_offset(i)) for i in range(self._list_len)] 766 767 def __getitem__(self, key: Union[int, slice]) -> Union[Any, List]: 768 if isinstance(key, int): 769 if key < 0: 770 key += len(self) 771 if key < 0 or key >= len(self): 772 raise IndexError 773 774 item_offset = read_uint64(self._base_address, self._item_offset(key)) 775 return self._shared_memory.get_obj(item_offset) 776 elif isinstance(key, slice): 777 if key.step is not None: 778 raise NotImplementedError 779 780 if key.start is None: 781 start = 0 782 elif key.start < 0: 783 start = key.start + len(self) 784 else: 785 start = key.start 786 787 if key.stop is None: 788 stop = len(self) 789 elif key.stop < 0: 790 stop = key.stop + len(self) 791 else: 792 stop = key.stop 793 794 if 
start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop: 795 raise IndexError 796 797 result_list = list() 798 for i in range(start, stop): 799 item_offset = read_uint64(self._base_address, self._item_offset(i)) 800 result_list.append(self._shared_memory.get_obj(item_offset)) 801 return result_list 802 else: 803 raise TypeError 804 805 def __setitem__(self, key: Union[int, slice], value: Union[Any, Sequence]) -> Any: 806 if isinstance(key, int): 807 if key < 0: 808 key += len(self) 809 if key < 0 or key >= len(self): 810 raise IndexError 811 812 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value) 813 write_uint64(self._base_address, self._item_offset(key), item_offset) 814 elif isinstance(key, slice): 815 if key.step is not None: 816 raise NotImplementedError 817 818 if key.start is None: 819 start = 0 820 elif key.start < 0: 821 start = key.start + len(self) 822 else: 823 start = key.start 824 825 if key.stop is None: 826 stop = len(self) 827 elif key.stop < 0: 828 stop = key.stop + len(self) 829 else: 830 stop = key.stop 831 832 if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop: 833 raise IndexError 834 835 for i in range(start, stop): 836 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value[i - start]) 837 write_uint64(self._base_address, self._item_offset(i), item_offset) 838 else: 839 raise TypeError 840 841 def __delitem__(self, key: Union[int, slice]) -> None: 842 if isinstance(key, int): 843 if key < 0: 844 key += len(self) 845 if key < 0 or key >= len(self): 846 raise IndexError 847 848 for i in range(key + 1, len(self)): 849 item_offset = read_uint64(self._base_address, self._item_offset(i)) 850 self._shared_memory.free(item_offset) 851 write_uint64(self._base_address, self._item_offset(i - 1), item_offset) 852 853 self._list_len -= 1 854 elif isinstance(key, slice): 855 if key.step is not None: 856 raise NotImplementedError 857 858 if key.start is 
None: 859 start = 0 860 elif key.start < 0: 861 start = key.start + len(self) 862 else: 863 start = key.start 864 865 if key.stop is None: 866 stop = len(self) 867 elif key.stop < 0: 868 stop = key.stop + len(self) 869 else: 870 stop = key.stop 871 872 if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop: 873 raise IndexError 874 875 for i in range(start, stop): 876 item_offset = read_uint64(self._base_address, self._item_offset(i)) 877 self._shared_memory.free(item_offset) 878 879 del_items_num = stop - start 880 881 for i in range(stop, len(self)): 882 item_offset = read_uint64(self._base_address, self._item_offset(i)) 883 write_uint64(self._base_address, self._item_offset(i - del_items_num), item_offset) 884 885 self._list_len -= del_items_num 886 else: 887 raise TypeError 888 889 def append(self, item: Any) -> None: 890 if self._list_len > self._list_capacity: 891 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list) 892 893 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item) 894 write_uint64(self._base_address, self._item_offset(self._list_len), item_offset) 895 self._list_len += 1 896 897 def extend(self, items: Sequence) -> None: 898 items_num = len(items) 899 if self._list_len + items_num > self._list_capacity: 900 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num) 901 902 for i, item in enumerate(items): 903 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item) 904 write_uint64(self._base_address, self._item_offset(self._list_len + i), item_offset) 905 906 self._list_len += items_num 907 908 def insert(self, index: int, item: Any) -> None: 909 if index < 0: 910 index += len(self) 911 if index < 0 or index > len(self): 912 raise IndexError 913 914 if self._list_len > self._list_capacity: 915 
self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list) 916 917 for i in range(self._list_len, index, -1): 918 item_offset = read_uint64(self._base_address, self._item_offset(i - 1)) 919 write_uint64(self._base_address, self._item_offset(i), item_offset) 920 921 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item) 922 write_uint64(self._base_address, self._item_offset(index), item_offset) 923 self._list_len += 1 924 925 def pop(self, index: int = -1) -> Any: 926 if index < 0: 927 index += len(self) 928 if index < 0 or index >= len(self): 929 raise IndexError 930 931 item_offset = read_uint64(self._base_address, self._item_offset(index)) 932 result = self._shared_memory.get_obj(item_offset) 933 934 for i in range(index + 1, len(self)): 935 item_offset = read_uint64(self._base_address, self._item_offset(i)) 936 write_uint64(self._base_address, self._item_offset(i - 1), item_offset) 937 938 self._list_len -= 1 939 return result 940 941 def remove(self, item: Any) -> None: 942 for i in range(len(self)): 943 item_offset = read_uint64(self._base_address, self._item_offset(i)) 944 if item_offset == item._offset: 945 for j in range(i + 1, len(self)): 946 item_offset = read_uint64(self._base_address, self._item_offset(j)) 947 write_uint64(self._base_address, self._item_offset(j - 1), item_offset) 948 949 self._list_len -= 1 950 return 951 952 raise ValueError 953 954 def clear(self) -> None: 955 for i in range(len(self)): 956 item_offset = read_uint64(self._base_address, self._item_offset(i)) 957 self._shared_memory.free(item_offset) 958 959 self._list_len = 0 960 961 def __iter__(self): 962 return IListIterator(self) 963 964 def __reversed__(self): 965 return IListReversedIterator(self) 966 967 def __contains__(self, item: Any) -> bool: 968 for i in range(len(self)): 969 item_offset = read_uint64(self._base_address, self._item_offset(i)) 970 if item_offset == item._offset: 971 return 
True 972 973 return False 974 975 def index(self, item: Any, start: int = 0, stop: int = None) -> int: 976 if stop is None: 977 stop = len(self) 978 979 for i in range(start, stop): 980 item_offset = read_uint64(self._base_address, self._item_offset(i)) 981 if item_offset == item._offset: 982 return i 983 984 raise ValueError 985 986 def count(self, item: Any) -> int: 987 result = 0 988 for i in range(len(self)): 989 item_offset = read_uint64(self._base_address, self._item_offset(i)) 990 if item_offset == item._offset: 991 result += 1 992 993 return result 994 995 def reverse(self) -> None: 996 for i in range(len(self) // 2): 997 item_offset = read_uint64(self._base_address, self._item_offset(i)) 998 write_uint64(self._base_address, self._item_offset(i), read_uint64(self._base_address, self._item_offset(len(self) - i - 1))) 999 write_uint64(self._base_address, self._item_offset(len(self) - i - 1), item_offset) 1000 1001 def sort(self, key: Any = None, reverse: bool = False) -> None: 1002 raise NotImplementedError 1003 1004 def copy(self) -> 'IList': 1005 result = IList(self._shared_memory) 1006 result.extend(self) 1007 return result 1008 1009 def __add__(self, other: Sequence) -> 'IList': 1010 result = IList(self._shared_memory) 1011 result.extend(self) 1012 result.extend(other) 1013 return result 1014 1015 def __iadd__(self, other: Sequence) -> 'IList': 1016 self.extend(other) 1017 return self 1018 1019 def __mul__(self, other: int) -> 'IList': 1020 result = IList(self._shared_memory) 1021 for i in range(other): 1022 result.extend(self) 1023 1024 return result 1025 1026 def __imul__(self, other: int) -> 'IList': 1027 my_copy: IList = self.copy() 1028 for i in range(other): 1029 self.extend(my_copy) 1030 1031 return self 1032 1033 def __rmul__(self, other: int) -> 'IList': 1034 return self.__mul__(other) 1035 1036 def __eq__(self, other: Sequence) -> bool: 1037 if len(self) != len(other): 1038 return False 1039 1040 for i in range(len(self)): 1041 if self[i] != 
other[i]: 1042 return False 1043 1044 return True 1045 1046 def __ne__(self, other: Sequence) -> bool: 1047 return not self.__eq__(other) 1048 1049 def __lt__(self, other: Sequence) -> bool: 1050 for i in range(len(self)): 1051 if self[i] >= other[i]: 1052 return False 1053 1054 return True 1055 1056 def __le__(self, other: Sequence) -> bool: 1057 for i in range(len(self)): 1058 if self[i] > other[i]: 1059 return False 1060 1061 return True 1062 1063 def __gt__(self, other: Sequence) -> bool: 1064 for i in range(len(self)): 1065 if self[i] <= other[i]: 1066 return False 1067 1068 return True 1069 1070 def __ge__(self, other: Sequence) -> bool: 1071 for i in range(len(self)): 1072 if self[i] < other[i]: 1073 return False 1074 1075 return True 1076 1077 def __repr__(self) -> str: 1078 return f'IList({list(self)})' 1079 1080 def __str__(self) -> str: 1081 return f'IList({list(self)})' 1082 1083 def __hash__(self) -> int: 1084 return hash(tuple(self)) 1085 1086 def __sizeof__(self) -> int: 1087 return read_uint64(self._base_address, self._offset + 8 * BaseObjOffsets.obj_size) + read_uint64(self._base_address, self._pointer_to_internal_list, 8 * BaseObjOffsets.obj_size) 1088 1089 def export(self) -> list: 1090 return list(self) 1091 1092 # def __del__(self) -> None: 1093 # self._shared_memory.free(self._pointer_to_internal_list) 1094 # self._shared_memory.free(self._offset) 1095 1096 1097# ====================================================================================================================== 1098# === InternalList ===================================================================================================== 1099 1100 1101class InternalListOffsets(IntEnum): 1102 capacity = 0 1103 size = 1 1104 1105 1106class InternalListFieldOffsets(IntEnum): 1107 field_type = 0 1108 offset_or_data = 1 1109 1110 1111class InternalListFieldTypes(IntEnum): 1112 tnone = 0 1113 tobj = 1 1114 tint = 2 1115 tfloat = 3 1116 tbool = 4 1117 1118 1119def 
malloc_tinternal_list(shared_memory: 'SharedMemory', size: Size, capacity: Size = None) -> Tuple[Offset, Size]: 1120 if (capacity is not None) and (size > capacity): 1121 raise ValueError 1122 1123 capacity = (size << 1 if size else 16) if capacity is None else capacity 1124 offset, real_size = shared_memory.malloc(ObjectType.tinternal_list, bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + capacity * bs * len(InternalListFieldOffsets), zero_mem=True) 1125 sys_data_offset = offset + bs * len(BaseObjOffsets) 1126 write_uint64(shared_memory.base_address, sys_data_offset + bs * InternalListOffsets.capacity, capacity) 1127 write_uint64(shared_memory.base_address, sys_data_offset + bs * InternalListOffsets.size, size) 1128 return offset, real_size 1129 1130 1131def realloc_tinternal_list(shared_memory: 'SharedMemory', offset: Offset, desired_size: int = None, new_capacity: int = None, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Offset, Size]: 1132 if (desired_size is not None) and (new_capacity is not None) and (desired_size > new_capacity): 1133 raise ValueError 1134 1135 sys_data_offset = offset + bs * len(BaseObjOffsets) 1136 capacity = read_uint64(shared_memory.base_address, sys_data_offset + bs * InternalListOffsets.capacity) 1137 size = read_uint64(shared_memory.base_address, sys_data_offset + bs * InternalListOffsets.size) 1138 new_list_capacity = capacity << 1 if new_capacity is None else new_capacity 1139 if new_capacity is None: 1140 if desired_size is None: 1141 new_list_capacity = capacity << 1 if capacity else 16 1142 else: 1143 new_list_capacity = desired_size << 1 if desired_size else 16 1144 else: 1145 new_list_capacity = new_capacity 1146 1147 if new_list_capacity < size: 1148 new_list_capacity = size 1149 1150 if new_list_capacity == capacity: 1151 real_size = read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_size) 1152 return offset, real_size 1153 1154 new_offset, new_real_size = shared_memory.realloc( 
1155 offset, 1156 bs * len(InternalListOffsets) + new_list_capacity * bs * len(InternalListFieldOffsets), 1157 loop_allowed, 1158 zero_mem 1159 ) 1160 new_sys_data_offset = new_offset + bs * len(BaseObjOffsets) 1161 write_uint64(shared_memory.base_address, new_sys_data_offset + bs * InternalListOffsets.capacity, new_list_capacity) 1162 return new_offset, new_real_size 1163 1164 1165def destroy_tinternal_list(shared_memory: 'SharedMemory', offset: Offset) -> None: 1166 shared_memory.free(offset) 1167 1168 1169def uint64_to_bytes(int_data: int) -> bytes: 1170 """ 1171 For a 64 bit unsigned int in little endian 1172 :param int_data: 1173 :return: bytes(); len == 8 1174 """ 1175 from struct import pack 1176 result = pack('<B', int_data) 1177 return result 1178 1179 1180def uint8_to_bytes(int_data: int) -> bytes: 1181 """ 1182 For a 64 bit unsigned int in little endian 1183 :param int_data: 1184 :return: bytes(); len == 8 1185 """ 1186 from struct import pack 1187 result = pack('<Q', int_data) 1188 return result 1189 1190 1191# ====================================================================================================================== 1192# === List ===================================================================================================== 1193 1194 1195class ListOffsets(IntEnum): 1196 internal_list_offset = 0 1197 1198 1199class IList(BaseIObject, list): 1200 __slots__ = ('_shared_memory', '_base_address', '_offset', '_offset__data', '_offset__pointer_to_internal_list') 1201 1202 def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: List = None) -> None: 1203 self._shared_memory = shared_memory 1204 self._base_address = shared_memory.base_address 1205 if offset is None: 1206 offset, real_size = shared_memory.malloc(ObjectType.tlist, bs * len(ListOffsets)) 1207 try: 1208 self._offset = offset 1209 self._offset__data = offset + bs * len(BaseObjOffsets) 1210 self._offset__pointer_to_internal_list = self._offset__data + bs * 
ListOffsets.internal_list_offset 1211 1212 if obj is None: 1213 obj = list() 1214 1215 data_len = len(obj) 1216 internal_list_offset, data_tuple_real_size = malloc_tinternal_list(shared_memory, data_len) 1217 self._pointer_to_internal_list = internal_list_offset 1218 for i, item in enumerate(obj): 1219 # print(self.get_children_offsets()) 1220 # # print(self.raw_to_list(slice(0, None))) 1221 # print(self.raw_to_bytes(200)) 1222 self._write_item(i, item) 1223 # print(self.get_children_offsets()) 1224 # # print(self.raw_to_list(slice(0, None))) 1225 # print(self.raw_to_bytes(200)) 1226 1227 # print(self.get_children_offsets()) 1228 # # print(self.raw_to_list(slice(0, None))) 1229 # print(self.raw_to_bytes(200)) 1230 # print('=======================') 1231 except: 1232 self._free_mem() 1233 raise 1234 else: 1235 self._offset = offset 1236 self._offset__data = offset + bs * len(BaseObjOffsets) 1237 self._offset__pointer_to_internal_list = self._offset__data + bs * ListOffsets.internal_list_offset 1238 1239 def raw_to_list(self, key) -> List[bytes]: 1240 if isinstance(key, int): 1241 if key < 0: 1242 key += len(self) 1243 if key < 0 or key >= len(self): 1244 raise IndexError 1245 1246 item_offset = self._read_item_offset_or_data(key) 1247 return [uint64_to_bytes(item_offset)] 1248 elif isinstance(key, slice): 1249 if key.step is not None: 1250 raise NotImplementedError 1251 1252 if key.start is None: 1253 start = 0 1254 elif key.start < 0: 1255 start = key.start + len(self) 1256 else: 1257 start = key.start 1258 1259 if key.stop is None: 1260 stop = len(self) 1261 elif key.stop < 0: 1262 stop = key.stop + len(self) 1263 else: 1264 stop = key.stop 1265 1266 if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop: 1267 raise IndexError 1268 1269 result_list = list() 1270 for i in range(start, stop): 1271 item_offset = self._read_item_offset_or_data(i) 1272 result_list.append(uint64_to_bytes(item_offset)) 1273 1274 return result_list 1275 1276 
    def raw_to_bytes(self, bytes_num: int) -> bytes:
        # Debug helper: dump `bytes_num` raw bytes starting at the internal
        # list structure.
        start_index = self._pointer_to_internal_list
        return self._shared_memory.read_mem(start_index, bytes_num)
        # return bytes(self._shared_memory._shared_memory.buf[start_index : start_index + bytes_num])

    @property
    def _obj_size(self) -> int:
        # Object size recorded in this object's base (BaseObjOffsets) header.
        return read_uint64(self._base_address, self._offset + bs * BaseObjOffsets.obj_size)

    @property
    def _pointer_to_internal_list(self) -> Offset:
        # Offset of the separately-allocated internal list structure; stored as
        # a single uint64 cell inside this object's data area.
        return read_uint64(self._base_address, self._offset__pointer_to_internal_list)

    @_pointer_to_internal_list.setter
    def _pointer_to_internal_list(self, value: Offset):
        # Re-point this object at a (re)allocated internal list structure.
        write_uint64(self._base_address, self._offset__pointer_to_internal_list, value)

    @property
    def _list_len(self) -> int:
        # Current item count: the InternalListOffsets.size cell of the internal
        # list header (which itself starts after a BaseObjOffsets header).
        return read_uint64(self._base_address, self._pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * InternalListOffsets.size)

    @_list_len.setter
    def _list_len(self, value: int):
        write_uint64(self._base_address, self._pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * InternalListOffsets.size, value)

    @property
    def _list_capacity(self) -> int:
        # Allocated item slots; InternalListOffsets.capacity cell of the header.
        return read_uint64(self._base_address, self._pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * InternalListOffsets.capacity)

    def _item_offset(self, key: int) -> Offset:
        # Start of item record `key`: base header + internal-list header, then
        # fixed-size records of len(InternalListFieldOffsets) cells each.
        return self._pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + key * bs * len(InternalListFieldOffsets)
{getpid()}. [{add_0},{add_1},{add_2},{add_3}],{add_0 + add_1 + add_2 + add_3},{self._pointer_to_internal_list}: item_type_offset: {key}:{result}') 1316 return result 1317 1318 def _item_value_offset(self, key: int) -> Offset: 1319 # from os import getpid 1320 result = self._pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + key * bs * len(InternalListFieldOffsets) + bs * InternalListFieldOffsets.offset_or_data 1321 # print(f'PID: {getpid()}. {bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + key * bs * len(InternalListFieldOffsets) + bs * InternalListFieldOffsets.offset_or_data},{self._pointer_to_internal_list}: item_value_offset: {key}:{result}') 1322 return result 1323 1324 def _read_item_type(self, key: int) -> int: 1325 return read_uint64(self._base_address, self._item_type_offset(key)) 1326 1327 def _write_item_type(self, key: int, item_type: int) -> None: 1328 write_uint64(self._base_address, self._item_type_offset(key), item_type) 1329 1330 def _read_item_offset_or_data(self, key: int) -> Union[Offset, int]: 1331 return read_uint64(self._base_address, self._item_value_offset(key)) 1332 1333 def _write_item_offset_or_data(self, key: int, offset_or_data: Union[Offset, int]) -> None: 1334 write_uint64(self._base_address, self._item_value_offset(key), offset_or_data) 1335 1336 # def _determine_obj_type(self, obj: Any) -> int: 1337 # if isinstance(obj, int): 1338 # return 1 1339 # elif isinstance(obj, float): 1340 # return 2 1341 # elif isinstance(obj, bool): 1342 # return 3 1343 # else: 1344 # return 0 1345 1346 def _determine_obj_type(self, obj: Any) -> int: 1347 if type(obj) is int: 1348 return InternalListFieldTypes.tint.value 1349 elif type(obj) is float: 1350 return InternalListFieldTypes.tfloat.value 1351 elif type(obj) is bool: 1352 return InternalListFieldTypes.tbool.value 1353 elif obj is None: 1354 return InternalListFieldTypes.tnone.value 1355 else: 1356 return InternalListFieldTypes.tobj.value 1357 1358 def 
_determine_obj_offset(self, obj: Any) -> Optional[Offset]: 1359 if isinstance(obj, BaseIObject): 1360 return obj._offset 1361 else: 1362 return None 1363 1364 def _compare_item_to_obj_fast(self, key: int, obj: Any, obj_type: int, obj_offset) -> bool: 1365 result: bool = False 1366 item_type = self._read_item_type(key) 1367 if item_type == obj_type: 1368 if item_type == InternalListFieldTypes.tobj.value: 1369 if obj_offset is None: 1370 if self._read_item_value(key, item_type) == obj: 1371 result = True 1372 else: 1373 if self._read_item_offset_or_data(key) == obj_offset: 1374 result = True 1375 elif item_type == InternalListFieldTypes.tint.value: 1376 if self._read_item_offset_or_data(key) == obj: 1377 result = True 1378 elif item_type == InternalListFieldTypes.tfloat.value: 1379 if self._read_item_offset_or_data(key) == obj: 1380 result = True 1381 elif item_type == InternalListFieldTypes.tbool.value: 1382 if self._read_item_offset_or_data(key) == obj: 1383 result = True 1384 elif item_type == InternalListFieldTypes.tnone.value: 1385 result = obj is None 1386 else: 1387 raise ValueError 1388 1389 return result 1390 1391 def _compare_item_to_obj(self, key: int, obj: Any) -> bool: 1392 obj_type = self._determine_obj_type(obj) 1393 obj_offset = self._determine_obj_offset(obj) 1394 return self._compare_item_to_obj_fast(key, obj, obj_type, obj_offset) 1395 1396 def _read_item_value(self, key: int, item_type: int) -> Any: 1397 if item_type == InternalListFieldTypes.tobj.value: 1398 item_offset = read_uint64(self._base_address, self._item_value_offset(key)) 1399 return self._shared_memory.get_obj(item_offset) 1400 elif item_type == InternalListFieldTypes.tint.value: 1401 return read_int64(self._base_address, self._item_value_offset(key)) 1402 elif item_type == InternalListFieldTypes.tfloat.value: 1403 return read_double(self._base_address, self._item_value_offset(key)) 1404 elif item_type == InternalListFieldTypes.tbool.value: 1405 return 
bool(read_uint64(self._base_address, self._item_value_offset(key))) 1406 elif item_type == InternalListFieldTypes.tnone.value: 1407 return None 1408 else: 1409 raise ValueError 1410 1411 def _write_item_value(self, key: int, item_type: int, value: Any) -> None: 1412 if item_type == InternalListFieldTypes.tobj.value: 1413 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value) 1414 write_uint64(self._base_address, self._item_value_offset(key), item_offset) 1415 elif item_type == InternalListFieldTypes.tint.value: 1416 write_int64(self._base_address, self._item_value_offset(key), value) 1417 elif item_type == InternalListFieldTypes.tfloat.value: 1418 write_double(self._base_address, self._item_value_offset(key), value) 1419 elif item_type == InternalListFieldTypes.tbool.value: 1420 write_uint64(self._base_address, self._item_value_offset(key), int(value)) 1421 elif item_type == InternalListFieldTypes.tnone.value: 1422 pass 1423 else: 1424 raise ValueError 1425 1426 def _free_item_value(self, key: int, item_type: int) -> None: 1427 if item_type == InternalListFieldTypes.tobj.value: 1428 item_offset = read_uint64(self._base_address, self._item_value_offset(key)) 1429 # self._shared_memory.free(item_offset) 1430 self._shared_memory.destroy_obj(item_offset) 1431 elif item_type == InternalListFieldTypes.tint.value: 1432 pass 1433 elif item_type == InternalListFieldTypes.tfloat.value: 1434 pass 1435 elif item_type == InternalListFieldTypes.tbool.value: 1436 pass 1437 elif item_type == InternalListFieldTypes.tnone.value: 1438 pass 1439 else: 1440 raise ValueError 1441 1442 self._write_item_type(key, InternalListFieldTypes.tnone.value) 1443 1444 def _read_item_type_and_value(self, key: int) -> Tuple[int, Any]: 1445 item_type = self._read_item_type(key) 1446 return item_type, self._read_item_value(key, item_type) 1447 1448 def _write_item_value_and_get_type(self, key: int, value: Any) -> int: 1449 if isinstance(value, int): 1450 
write_uint64(self._base_address, self._item_value_offset(key), value) 1451 return InternalListFieldTypes.tint.value 1452 elif isinstance(value, float): 1453 write_double(self._base_address, self._item_value_offset(key), value) 1454 return InternalListFieldTypes.tfloat.value 1455 elif isinstance(value, bool): 1456 write_uint64(self._base_address, self._item_value_offset(key), int(value)) 1457 return InternalListFieldTypes.tbool.value 1458 elif value is None: 1459 return InternalListFieldTypes.tnone.value 1460 else: 1461 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value) 1462 write_uint64(self._base_address, self._item_value_offset(key), item_offset) 1463 return InternalListFieldTypes.tobj.value 1464 1465 def _free_item_value_and_get_type(self, key: int) -> int: 1466 item_type = self._read_item_type(key) 1467 self._free_item_value(key, item_type) 1468 return item_type 1469 1470 def _read_item(self, key: int) -> Any: 1471 item_type = self._read_item_type(key) 1472 return self._read_item_value(key, item_type) 1473 1474 def _write_item(self, key: int, value: Any) -> None: 1475 item_type = self._write_item_value_and_get_type(key, value) 1476 self._write_item_type(key, item_type) 1477 1478 def _free_item(self, key: int) -> None: 1479 item_type = self._read_item_type(key) 1480 self._free_item_value(key, item_type) 1481 1482 def _copy_item(self, src_key: int, dst_key: int) -> None: 1483 self._write_item_type(dst_key, self._read_item_type(src_key)) 1484 self._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key)) 1485 1486 def copy_item(self, src_key: int, dst_key: int) -> None: 1487 return self._copy_item(src_key, dst_key) 1488 1489 def _move_item(self, src_key: int, dst_key: int) -> None: 1490 self._write_item_type(dst_key, self._read_item_type(src_key)) 1491 self._write_item_type(src_key, InternalListFieldTypes.tnone.value) 1492 self._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key)) 1493 1494 def 
move_item(self, src_key: int, dst_key: int) -> None: 1495 return self._move_item(src_key, dst_key) 1496 1497 def copy_item_to_list(self, src_key: int, other: 'IList', dst_key: int) -> None: 1498 other._write_item_type(dst_key, self._read_item_type(src_key)) 1499 other._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key)) 1500 1501 def move_item_to_list(self, src_key: int, other: 'IList', dst_key: int) -> None: 1502 other._write_item_type(dst_key, self._read_item_type(src_key)) 1503 self._write_item_type(src_key, InternalListFieldTypes.tnone.value) 1504 other._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key)) 1505 1506 def _swap_items(self, key1: int, key2: int) -> None: 1507 item_type1 = self._read_item_type(key1) 1508 item_offset_or_data1 = self._read_item_offset_or_data(key1) 1509 self._write_item_type(key1, self._read_item_type(key2)) 1510 self._write_item_type(key2, item_type1) 1511 self._write_item_offset_or_data(key1, self._read_item_offset_or_data(key2)) 1512 self._write_item_offset_or_data(key2, item_offset_or_data1) 1513 1514 def swap_items(self, key1: int, key2: int) -> None: 1515 return self._swap_items(key1, key2) 1516 1517 def __len__(self) -> int: 1518 return self._list_len 1519 1520 def get_children_data_or_offsets(self) -> List[Offset]: 1521 return [self._read_item_offset_or_data(i) for i in range(self._list_len)] 1522 1523 def get_children_offsets(self): 1524 return self.get_children_data_or_offsets() 1525 1526 def _getitem_as_offset(self, key: int) -> Tuple[int, Offset]: 1527 return list__get_item_as_offset(key, self._base_address, self._offset__pointer_to_internal_list) 1528 1529 def __getitem__(self, key: Union[int, slice]) -> Union[Any, List]: 1530 if isinstance(key, int): 1531 base_address = self._base_address 1532 offset__pointer_to_internal_list = self._offset__pointer_to_internal_list 1533 pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list) 1534 self_len = 
read_uint64(base_address, pointer_to_internal_list + 24) 1535 if key < 0 or key >= self_len: 1536 raise IndexError 1537 1538 return list__get_item(key, self._base_address, self._offset__pointer_to_internal_list, self._shared_memory.get_obj) 1539 1540 # base_address = self._base_address 1541 # offset__pointer_to_internal_list = self._offset__pointer_to_internal_list 1542 # pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list) 1543 # self_len = read_uint64(base_address, pointer_to_internal_list + 24) 1544 1545 # if key < 0: 1546 # key += self_len 1547 1548 # if key < 0 or key >= self_len: 1549 # raise IndexError 1550 1551 # item_type_offset = pointer_to_internal_list + 32 + key * 16 1552 # item_value_offset = pointer_to_internal_list + 40 + key * 16 1553 # item_type = read_uint64(base_address, item_type_offset) 1554 # if item_type == 1: 1555 # return read_int64(base_address, item_value_offset) 1556 # elif item_type == 2: 1557 # return read_double(base_address, item_value_offset) 1558 # elif item_type == 3: 1559 # return bool(read_uint64(base_address, item_value_offset)) 1560 # elif item_type == 0: 1561 # item_offset = read_uint64(base_address, item_value_offset) 1562 # return self._shared_memory.get_obj(item_offset) 1563 # else: 1564 # raise ValueError 1565 1566 # # return self._read_item(key) 1567 elif isinstance(key, slice): 1568 if key.step is not None: 1569 raise NotImplementedError 1570 1571 if key.start is None: 1572 start = 0 1573 elif key.start < 0: 1574 start = key.start + len(self) 1575 else: 1576 start = key.start 1577 1578 if key.stop is None: 1579 stop = len(self) 1580 elif key.stop < 0: 1581 stop = key.stop + len(self) 1582 else: 1583 stop = key.stop 1584 1585 if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop: 1586 raise IndexError 1587 1588 result_list = list() 1589 # performance improvement instead of using self._read_item(i) 1590 base_address = self._base_address 1591 
offset__pointer_to_internal_list = self._offset__pointer_to_internal_list 1592 pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list) 1593 1594 # item_type_offset = pointer_to_internal_list + 32 + i * 16 1595 item_type_offset = pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + i * bs * len(InternalListFieldOffsets) + bs * InternalListFieldOffsets.field_type 1596 1597 # item_value_offset = pointer_to_internal_list + 40 + i * 16 1598 item_value_offset = pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + i * bs * len(InternalListFieldOffsets) + bs * InternalListFieldOffsets.offset_or_data 1599 1600 for i in range(start, stop): 1601 # result_list.append(self._read_item(i)) 1602 1603 # performance improvement instead of using self._read_item(i) 1604 item_type = read_uint64(base_address, item_type_offset) 1605 if item_type == InternalListFieldTypes.tint.value: 1606 result_list.append(read_int64(base_address, item_value_offset)) 1607 elif item_type == InternalListFieldTypes.tfloat.value: 1608 result_list.append(read_double(base_address, item_value_offset)) 1609 elif item_type == InternalListFieldTypes.tbool.value: 1610 result_list.append(bool(read_uint64(base_address, item_value_offset))) 1611 elif item_type == InternalListFieldTypes.tnone.value: 1612 result_list.append(None) 1613 elif item_type == InternalListFieldTypes.tobj.value: 1614 item_offset = read_uint64(base_address, item_value_offset) 1615 result_list.append(self._shared_memory.get_obj(item_offset)) 1616 else: 1617 raise ValueError 1618 1619 return result_list 1620 else: 1621 raise TypeError 1622 1623 def _setitem_as_offset(self, key: int, value_type_and_offset: Tuple[int, Offset], need_to_free_item: bool = True) -> Any: 1624 value_item_type, value_item_offset = value_type_and_offset 1625 list__set_item_as_offset(key, value_item_type, value_item_offset, self._base_address, 
self._offset__pointer_to_internal_list, need_to_free_item, self._shared_memory.destroy_obj) 1626 1627 def __setitem__(self, key: Union[int, slice], value: Union[Any, Sequence], need_to_free_item: bool = True) -> Any: 1628 if isinstance(key, int): 1629 # print(f'{key=}, {value=}, {need_to_free_item=}') 1630 # internal_list_data_offset = self._pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + key * bs * len(InternalListFieldOffsets) + bs * InternalListFieldOffsets.field_type 1631 # internal_list_data_size = self._list_len * bs * len(InternalListFieldOffsets) 1632 # self._shared_memory.print_mem(internal_list_data_offset, internal_list_data_size, 'internal_list before list__set_item') 1633 1634 base_address = self._base_address 1635 offset__pointer_to_internal_list = self._offset__pointer_to_internal_list 1636 pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list) 1637 self_len = read_uint64(base_address, pointer_to_internal_list + 24) 1638 if key < 0 or key >= self_len: 1639 raise IndexError 1640 1641 list__set_item(key, value, self._base_address, self._offset__pointer_to_internal_list, need_to_free_item, self._shared_memory.destroy_obj, self._shared_memory.put_obj) 1642 1643 # base_address = self._base_address 1644 # offset__pointer_to_internal_list = self._offset__pointer_to_internal_list 1645 # pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list) 1646 # self_len = read_uint64(base_address, pointer_to_internal_list + 24) 1647 1648 # if key < 0: 1649 # key += self_len 1650 1651 # if key < 0 or key >= self_len: 1652 # raise IndexError 1653 1654 # item_type_offset = pointer_to_internal_list + 32 + key * 16 1655 # item_value_offset = pointer_to_internal_list + 40 + key * 16 1656 # if isinstance(value, int): 1657 # write_int64(base_address, item_value_offset, value) 1658 # item_type = 1 1659 # elif isinstance(value, float): 1660 # write_double(base_address, 
item_value_offset, value) 1661 # item_type = 2 1662 # elif isinstance(value, bool): 1663 # write_uint64(base_address, item_value_offset, int(value)) 1664 # item_type = 3 1665 # else: 1666 # item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value) 1667 # write_uint64(base_address, item_value_offset, item_offset) 1668 # item_type = 0 1669 1670 # write_uint64(base_address, item_type_offset, item_type) 1671 1672 # # self._write_item(key, value) 1673 elif isinstance(key, slice): 1674 if key.step is not None: 1675 raise NotImplementedError 1676 1677 if key.start is None: 1678 start = 0 1679 elif key.start < 0: 1680 start = key.start + len(self) 1681 else: 1682 start = key.start 1683 1684 if key.stop is None: 1685 stop = len(self) 1686 elif key.stop < 0: 1687 stop = key.stop + len(self) 1688 else: 1689 stop = key.stop 1690 1691 if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop: 1692 raise IndexError 1693 1694 if need_to_free_item: 1695 for i in range(start, stop): 1696 self._free_item(i) 1697 1698 # performance improvement instead of using self._write_item(i, item) 1699 base_address = self._base_address 1700 offset__pointer_to_internal_list = self._offset__pointer_to_internal_list 1701 pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list) 1702 1703 # item_type_offset = pointer_to_internal_list + 32 + i * 16 1704 item_type_offset = pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + i * bs * len(InternalListFieldOffsets) + bs * InternalListFieldOffsets.field_type 1705 1706 # item_value_offset = pointer_to_internal_list + 40 + i * 16 1707 item_value_offset = pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + i * bs * len(InternalListFieldOffsets) + bs * InternalListFieldOffsets.offset_or_data 1708 1709 for i in range(start, stop): 1710 item = value[i - start] 1711 # self._write_item(i, item) 1712 1713 # performance 
improvement instead of using self._write_item(i, item) 1714 if isinstance(item, int): 1715 write_int64(base_address, item_value_offset, item) 1716 item_type = InternalListFieldTypes.tint.value 1717 elif isinstance(item, float): 1718 write_double(base_address, item_value_offset, item) 1719 item_type = InternalListFieldTypes.tfloat.value 1720 elif isinstance(item, bool): 1721 write_uint64(base_address, item_value_offset, int(item)) 1722 item_type = InternalListFieldTypes.tbool.value 1723 elif item is None: 1724 item_type = InternalListFieldTypes.tnone.value 1725 else: 1726 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item) 1727 write_uint64(base_address, item_value_offset, item_offset) 1728 item_type = InternalListFieldTypes.tobj.value 1729 1730 write_uint64(base_address, item_type_offset, item_type) 1731 else: 1732 raise TypeError 1733 1734 def __delitem__(self, key: Union[int, slice], need_to_free_item: bool = True) -> None: 1735 if isinstance(key, int): 1736 if key < 0: 1737 key += len(self) 1738 if key < 0 or key >= len(self): 1739 raise IndexError 1740 1741 if need_to_free_item: 1742 self._free_item(key) 1743 1744 for i in range(key + 1, len(self)): 1745 self._move_item(i, i - 1) 1746 1747 self._list_len -= 1 1748 elif isinstance(key, slice): 1749 if key.step is not None: 1750 raise NotImplementedError 1751 1752 if key.start is None: 1753 start = 0 1754 elif key.start < 0: 1755 start = key.start + len(self) 1756 else: 1757 start = key.start 1758 1759 if key.stop is None: 1760 stop = len(self) 1761 elif key.stop < 0: 1762 stop = key.stop + len(self) 1763 else: 1764 stop = key.stop 1765 1766 if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop: 1767 raise IndexError 1768 1769 if need_to_free_item: 1770 for i in range(start, stop): 1771 self._free_item(i) 1772 1773 del_items_num = stop - start 1774 1775 for i in range(stop, len(self)): 1776 self._move_item(i, i - del_items_num) 1777 1778 self._list_len -= 
del_items_num 1779 else: 1780 raise TypeError 1781 1782 def append(self, item: Any) -> None: 1783 if self._list_len > self._list_capacity: 1784 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list) 1785 1786 self._list_len += 1 1787 self.__setitem__(self._list_len - 1, item, need_to_free_item=False) 1788 1789 def append_as_offset(self, value_type_and_offset: Tuple[int, Offset]) -> None: 1790 if self._list_len > self._list_capacity: 1791 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list) 1792 1793 self._list_len += 1 1794 self._setitem_as_offset(self._list_len - 1, value_type_and_offset, need_to_free_item=False) 1795 1796 def getitem_as_offset(self, key: int) -> Tuple[int, Offset]: 1797 return self._getitem_as_offset(key) 1798 1799 def setitem_as_offset(self, key: int, value_type_and_offset: Tuple[int, Offset], need_to_free_item=True) -> None: 1800 self._setitem_as_offset(key, value_type_and_offset, need_to_free_item) 1801 1802 def extend(self, items: Sequence) -> None: 1803 items_num = len(items) 1804 if (self._list_len + items_num) > self._list_capacity: 1805 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num) 1806 1807 original_list_len = self._list_len 1808 self._list_len += items_num 1809 for i, item in enumerate(items): 1810 self.__setitem__(original_list_len + i, item, need_to_free_item=False) 1811 1812 def extend_with(self, items_num: int, value = None) -> None: 1813 if (self._list_len + items_num) > self._list_capacity: 1814 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num) 1815 1816 original_list_len = self._list_len 1817 self._list_len += items_num 1818 for i in range(items_num): 1819 self.__setitem__(original_list_len + i, 
    def set_capacity(self, capacity: int) -> Optional[int]:
        """Grow the internal list so it can hold at least `capacity` items.

        Never shrinks. Returns the real size of the reallocated internal list,
        or None when the current capacity is already sufficient (annotation
        widened to Optional[int] accordingly).
        """
        if capacity <= self._list_capacity:
            # No-op branch: deliberately returns None, not an int.
            return

        self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, capacity)
        return result_size
{}') 1852 1853 def print_internal_list(self, text: str = None, additional_cells: int = 0): 1854 internal_list = self._shared_memory.read_mem(self._pointer_to_internal_list, bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + self._list_len * bs * len(InternalListFieldOffsets) + additional_cells * bs * len(InternalListFieldOffsets)) 1855 print('--- internal list -------------') 1856 if text: 1857 print(text.format(self._pointer_to_internal_list)) 1858 print('------') 1859 1860 index = 0 1861 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs]) 1862 index += bs 1863 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs]) 1864 index += bs 1865 print('---') 1866 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs]) 1867 index += bs 1868 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs]) 1869 index += bs 1870 print('---') 1871 for i in range(self._list_len): 1872 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs * 2]) 1873 index += bs * 2 1874 1875 if additional_cells: 1876 print('------') 1877 for i in range(additional_cells): 1878 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs]) 1879 index += bs * 2 1880 print('-------------------------------') 1881 print() 1882 1883 def pop(self, index: int = -1) -> Any: 1884 if index < 0: 1885 index += len(self) 1886 if index < 0 or index >= len(self): 1887 raise IndexError 1888 1889 result = self.__getitem__(index) 1890 1891 for i in range(index + 1, len(self)): 1892 self._move_item(i, i - 1) 1893 1894 self._list_len -= 1 1895 return result 1896 1897 def remove(self, obj: Any) -> None: 1898 obj_type = self._determine_obj_type(obj) 1899 obj_offset = self._determine_obj_offset(obj) 1900 found_in_index = None 1901 for i in range(len(self)): 1902 if self._compare_item_to_obj_fast(i, obj, 
obj_type, obj_offset): 1903 found_in_index = i 1904 break 1905 1906 if found_in_index is None: 1907 raise ValueError 1908 else: 1909 self.__delitem__(found_in_index) 1910 1911 def clear(self, need_to_free_item: bool = True) -> None: 1912 if need_to_free_item: 1913 for i in range(len(self)): 1914 self._free_item(i) 1915 1916 self._list_len = 0 1917 1918 def __iter__(self): 1919 return IListIterator(self) 1920 1921 def __reversed__(self): 1922 return IListReversedIterator(self) 1923 1924 def __contains__(self, obj: Any) -> bool: 1925 obj_type = self._determine_obj_type(obj) 1926 obj_offset = self._determine_obj_offset(obj) 1927 found_in_index = None 1928 for i in range(len(self)): 1929 if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset): 1930 found_in_index = i 1931 break 1932 1933 if found_in_index is None: 1934 return False 1935 else: 1936 return True 1937 1938 def index(self, obj: Any, start: int = 0, stop: int = None) -> int: 1939 if stop is None: 1940 stop = len(self) 1941 1942 obj_type = self._determine_obj_type(obj) 1943 obj_offset = self._determine_obj_offset(obj) 1944 found_in_index = None 1945 for i in range(start, stop): 1946 if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset): 1947 found_in_index = i 1948 break 1949 1950 if found_in_index is None: 1951 raise ValueError 1952 else: 1953 return found_in_index 1954 1955 def count(self, obj: Any) -> int: 1956 obj_type = self._determine_obj_type(obj) 1957 obj_offset = self._determine_obj_offset(obj) 1958 result = 0 1959 for i in range(len(self)): 1960 if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset): 1961 result += 1 1962 1963 return result 1964 1965 def reverse(self) -> None: 1966 my_len = len(self) 1967 for i in range(my_len // 2): 1968 self._swap_items(i, my_len - i - 1) 1969 1970 def sort(self, key: Any = None, reverse: bool = False) -> None: 1971 raise NotImplementedError 1972 1973 def copy(self) -> 'IList': 1974 result = IList(self._shared_memory) 1975 
result.extend(self) 1976 return result 1977 1978 def __add__(self, other: Sequence) -> 'IList': 1979 result = IList(self._shared_memory) 1980 result.extend(self) 1981 result.extend(other) 1982 return result 1983 1984 def __iadd__(self, other: Sequence) -> 'IList': 1985 self.extend(other) 1986 return self 1987 1988 def __mul__(self, other: int) -> 'IList': 1989 result = IList(self._shared_memory) 1990 for i in range(other): 1991 result.extend(self) 1992 1993 return result 1994 1995 def __imul__(self, other: int) -> 'IList': 1996 my_copy: IList = self.copy() 1997 for i in range(other): 1998 self.extend(my_copy) 1999 2000 return self 2001 2002 def __rmul__(self, other: int) -> 'IList': 2003 return self.__mul__(other) 2004 2005 def __eq__(self, other: Sequence) -> bool: 2006 if len(self) != len(other): 2007 return False 2008 2009 for i in range(len(self)): 2010 if self[i] != other[i]: 2011 return False 2012 2013 return True 2014 2015 def __ne__(self, other: Sequence) -> bool: 2016 return not self.__eq__(other) 2017 2018 def __lt__(self, other: Sequence) -> bool: 2019 for i in range(len(self)): 2020 if self[i] >= other[i]: 2021 return False 2022 2023 return True 2024 2025 def __le__(self, other: Sequence) -> bool: 2026 for i in range(len(self)): 2027 if self[i] > other[i]: 2028 return False 2029 2030 return True 2031 2032 def __gt__(self, other: Sequence) -> bool: 2033 for i in range(len(self)): 2034 if self[i] <= other[i]: 2035 return False 2036 2037 return True 2038 2039 def __ge__(self, other: Sequence) -> bool: 2040 for i in range(len(self)): 2041 if self[i] < other[i]: 2042 return False 2043 2044 return True 2045 2046 def __repr__(self) -> str: 2047 return f'IList({list(self)})' 2048 2049 def __str__(self) -> str: 2050 return f'IList({list(self)})' 2051 2052 def __hash__(self) -> int: 2053 return hash(tuple(self)) 2054 2055 def __sizeof__(self) -> int: 2056 return bs * len(BaseObjOffsets) + read_uint64(self._base_address, self._offset + bs * 
class IListIterator:
    """Forward iterator over an IList; yields items by increasing index."""

    def __init__(self, ilist: IList) -> None:
        self._ilist = ilist
        self._index = 0

    def __iter__(self):
        return self

    def __next__(self):
        ilist = self._ilist
        if self._index >= len(ilist):
            raise StopIteration

        item = ilist[self._index]
        self._index += 1
        return item
offset) 2124 2125 def destroy(self, shared_memory: 'SharedMemory', offset: Offset): 2126 if ObjectType.tlist != read_uint64(shared_memory.base_address, offset): 2127 raise WrongObjectTypeError 2128 2129 obj: IList = IList(shared_memory, offset) 2130 obj._free_mem() 2131 2132 2133# ====================================================================================================================== 2134# === Tuple ============================================================================================================ 2135 2136 2137class TupleOffsets(IntEnum): 2138 size = 0 2139 2140 2141class TupleFieldOffsets(IntEnum): 2142 item_offset = 0 2143 2144 2145class TTuple: 2146 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: tuple) -> Tuple[tuple, Offset, Size]: 2147 offset, real_size = shared_memory.malloc(ObjectType.ttuple, bs * len(TupleOffsets) + len(obj) * bs * len(TupleFieldOffsets)) 2148 created_items_offsets: List[Offset] = list() 2149 try: 2150 if (1, [2, 3]) == obj: 2151 shared_memory.offset_to_be_monitored = offset 2152 2153 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TupleOffsets.size, len(obj)) 2154 for i, item in enumerate(obj): 2155 item_mapped_obj, item_offset, item_size = shared_memory.put_obj(item) 2156 created_items_offsets.append(item_offset) 2157 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * len(TupleOffsets) + i * bs * len(TupleFieldOffsets), item_offset) 2158 except: 2159 shared_memory.free(offset) 2160 for item_offset in created_items_offsets: 2161 shared_memory.destroy_obj(item_offset) 2162 2163 raise 2164 2165 return obj, offset, real_size 2166 2167 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2168 if ObjectType.ttuple != read_uint64(shared_memory.base_address, offset): 2169 raise WrongObjectTypeError 2170 2171 result_list = list() 2172 size = read_uint64(shared_memory.base_address, offset + bs * 
len(BaseObjOffsets) + bs * TupleOffsets.size) 2173 for i in range(size): 2174 item_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * len(TupleOffsets) + i * bs * len(TupleFieldOffsets)) 2175 result_list.append(shared_memory.get_obj(item_offset)) 2176 2177 return tuple(result_list) 2178 2179 def destroy(self, shared_memory: 'SharedMemory', offset: Offset): 2180 if ObjectType.ttuple != read_uint64(shared_memory.base_address, offset): 2181 raise WrongObjectTypeError 2182 2183 size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TupleOffsets.size) 2184 for i in range(size): 2185 item_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * len(TupleOffsets) + i * bs * len(TupleFieldOffsets)) 2186 shared_memory.destroy_obj(item_offset) 2187 2188 shared_memory.free(offset) 2189 2190 2191# ====================================================================================================================== 2192# === DatetimeTypes ============================================================================================================= 2193 2194 2195class DatetimeOffsets(IntEnum): 2196 data_bytes_offset = 0 2197 2198 2199DatetimeTypes = Union[datetime, timedelta, timezone, date, time] 2200 2201 2202class TDatetime: 2203 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: DatetimeTypes) -> Tuple[DatetimeTypes, Offset, Size]: 2204 offset, real_size = shared_memory.malloc(ObjectType.tdatetime, bs * len(DatetimeOffsets)) 2205 created_items_offsets: List[Offset] = list() 2206 try: 2207 data_tuple_mapped_obj, data_bytes_offset, data_tuple_size = shared_memory.put_obj(pickle_dumps(obj)) 2208 created_items_offsets.append(data_bytes_offset) 2209 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * DatetimeOffsets.data_bytes_offset, data_bytes_offset) 2210 except: 2211 shared_memory.free(offset) 2212 for item_offset in 
created_items_offsets: 2213 shared_memory.destroy_obj(item_offset) 2214 2215 raise 2216 2217 return pickle_loads(data_tuple_mapped_obj), offset, real_size 2218 2219 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> DatetimeTypes: 2220 if ObjectType.tdatetime != read_uint64(shared_memory.base_address, offset): 2221 raise WrongObjectTypeError 2222 2223 data_bytes_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * DatetimeOffsets.data_bytes_offset) 2224 result_tuple = shared_memory.get_obj(data_bytes_offset) 2225 return pickle_loads(result_tuple) 2226 2227 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2228 if ObjectType.tdatetime != read_uint64(shared_memory.base_address, offset): 2229 raise WrongObjectTypeError 2230 2231 data_bytes_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * DatetimeOffsets.data_bytes_offset) 2232 shared_memory.destroy_obj(data_bytes_offset) 2233 shared_memory.free(offset) 2234 2235 2236# ====================================================================================================================== 2237# === Decimal ============================================================================================================= 2238 2239 2240class DecimalOffsets(IntEnum): 2241 data_tuple_offset = 0 2242 2243 2244class TDecimal: 2245 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Decimal) -> Tuple[Decimal, Offset, Size]: 2246 offset, real_size = shared_memory.malloc(ObjectType.tdecimal, bs * len(DecimalOffsets)) 2247 created_items_offsets: List[Offset] = list() 2248 try: 2249 data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj(tuple(obj.as_tuple())) 2250 created_items_offsets.append(data_tuple_offset) 2251 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * DecimalOffsets.data_tuple_offset, data_tuple_offset) 2252 except: 
2253 shared_memory.free(offset) 2254 for item_offset in created_items_offsets: 2255 shared_memory.destroy_obj(item_offset) 2256 2257 raise 2258 2259 return Decimal(data_tuple_mapped_obj), offset, real_size 2260 2261 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Decimal: 2262 if ObjectType.tdecimal != read_uint64(shared_memory.base_address, offset): 2263 raise WrongObjectTypeError 2264 2265 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * DecimalOffsets.data_tuple_offset) 2266 result_tuple = shared_memory.get_obj(data_tuple_offset) 2267 return Decimal(result_tuple) 2268 2269 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2270 if ObjectType.tdecimal != read_uint64(shared_memory.base_address, offset): 2271 raise WrongObjectTypeError 2272 2273 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * DecimalOffsets.data_tuple_offset) 2274 shared_memory.destroy_obj(data_tuple_offset) 2275 shared_memory.free(offset) 2276 2277 2278# ====================================================================================================================== 2279# === Slice ============================================================================================================= 2280 2281 2282class SliceOffsets(IntEnum): 2283 data_tuple_offset = 0 2284 2285 2286class TSlice: 2287 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: slice) -> Tuple[slice, Offset, Size]: 2288 offset, real_size = shared_memory.malloc(ObjectType.tslice, bs * len(SliceOffsets)) 2289 created_items_offsets: List[Offset] = list() 2290 try: 2291 data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj(tuple(obj.start, obj.stop, obj.step)) 2292 created_items_offsets.append(data_tuple_offset) 2293 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * SliceOffsets.data_tuple_offset, 
data_tuple_offset) 2294 except: 2295 shared_memory.free(offset) 2296 for item_offset in created_items_offsets: 2297 shared_memory.destroy_obj(item_offset) 2298 2299 raise 2300 2301 return slice(*data_tuple_mapped_obj), offset, real_size 2302 2303 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> slice: 2304 if ObjectType.tslice != read_uint64(shared_memory.base_address, offset): 2305 raise WrongObjectTypeError 2306 2307 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * SliceOffsets.data_tuple_offset) 2308 result_tuple = shared_memory.get_obj(data_tuple_offset) 2309 return slice(*result_tuple) 2310 2311 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2312 if ObjectType.tslice != read_uint64(shared_memory.base_address, offset): 2313 raise WrongObjectTypeError 2314 2315 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * SliceOffsets.data_tuple_offset) 2316 shared_memory.destroy_obj(data_tuple_offset) 2317 shared_memory.free(offset) 2318 2319 2320# ====================================================================================================================== 2321# === Complex ============================================================================================================= 2322 2323 2324class ComplexOffsets(IntEnum): 2325 data_tuple_offset = 0 2326 2327 2328class TComplex: 2329 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: complex) -> Tuple[complex, Offset, Size]: 2330 offset, real_size = shared_memory.malloc(ObjectType.tfastset, bs * len(ComplexOffsets)) 2331 created_items_offsets: List[Offset] = list() 2332 try: 2333 data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj(tuple(obj.real, obj.imag)) 2334 created_items_offsets.append(data_tuple_offset) 2335 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * 
ComplexOffsets.data_tuple_offset, data_tuple_offset) 2336 except: 2337 shared_memory.free(offset) 2338 for item_offset in created_items_offsets: 2339 shared_memory.destroy_obj(item_offset) 2340 2341 raise 2342 2343 return complex(real=data_tuple_mapped_obj[0], imag=data_tuple_mapped_obj[1]), offset, real_size 2344 2345 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> complex: 2346 if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset): 2347 raise WrongObjectTypeError 2348 2349 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * ComplexOffsets.data_tuple_offset) 2350 result_tuple = shared_memory.get_obj(data_tuple_offset) 2351 return complex(real=result_tuple[0], imag=result_tuple[1]) 2352 2353 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2354 if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset): 2355 raise WrongObjectTypeError 2356 2357 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * ComplexOffsets.data_tuple_offset) 2358 shared_memory.destroy_obj(data_tuple_offset) 2359 shared_memory.free(offset) 2360 2361 2362# ====================================================================================================================== 2363# === FastSet ============================================================================================================= 2364 2365 2366class FastLimitedSet(set): 2367 ... 
class FastSetOffsets(IntEnum):
    # Offset (in machine words) of the payload-tuple pointer within the body.
    data_tuple_offset = 0


class TFastSet:
    """Codec that stores a Python set in shared memory as a single frozen tuple.

    "Fast" because the whole set is serialized and deserialized in one shot;
    the shared representation is not incrementally mutable.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: set) -> Tuple[set, Offset, Size]:
        """Serialize `obj` into shared memory; return (local set, offset, allocated size)."""
        header_offset, allocated_size = shared_memory.malloc(ObjectType.tfastset, bs * len(FastSetOffsets))
        child_offsets: List[Offset] = []
        try:
            mapped_tuple, tuple_offset, _ = shared_memory.put_obj(tuple(obj))
            child_offsets.append(tuple_offset)
            field_address = header_offset + bs * len(BaseObjOffsets) + bs * FastSetOffsets.data_tuple_offset
            write_uint64(shared_memory.base_address, field_address, tuple_offset)
        except:
            # Roll back the header allocation and any children created so far.
            shared_memory.free(header_offset)
            for child_offset in child_offsets:
                shared_memory.destroy_obj(child_offset)

            raise

        return set(mapped_tuple), header_offset, allocated_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> set:
        """Rebuild a process-local set from its shared-memory representation."""
        if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        field_address = offset + bs * len(BaseObjOffsets) + bs * FastSetOffsets.data_tuple_offset
        tuple_offset = read_uint64(shared_memory.base_address, field_address)
        return set(shared_memory.get_obj(tuple_offset))

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the payload tuple, then the set header itself."""
        if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        field_address = offset + bs * len(BaseObjOffsets) + bs * FastSetOffsets.data_tuple_offset
        tuple_offset = read_uint64(shared_memory.base_address, field_address)
        shared_memory.destroy_obj(tuple_offset)
        shared_memory.free(offset)
# ======================================================================================================================


class FastLimitedDict(dict):
    # Marker subclass: a dict intended for one-shot (non-incremental) shared-memory storage.
    ...


class FastDictOffsets(IntEnum):
    # Offset (in machine words) of the items-tuple pointer within the body.
    data_tuple_offset = 0


class TFastDict:
    """Codec that stores a Python dict in shared memory as one tuple of (key, value) pairs.

    "Fast" because the whole dict is serialized and deserialized in one shot;
    the shared representation is not incrementally mutable.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: dict) -> Tuple[dict, Offset, Size]:
        """Serialize `obj` into shared memory; return (local dict, offset, allocated size)."""
        header_offset, allocated_size = shared_memory.malloc(ObjectType.tfastdict, bs * len(FastDictOffsets))
        child_offsets: List[Offset] = []
        try:
            mapped_items, items_offset, _ = shared_memory.put_obj(tuple(obj.items()))
            child_offsets.append(items_offset)
            field_address = header_offset + bs * len(BaseObjOffsets) + bs * FastDictOffsets.data_tuple_offset
            write_uint64(shared_memory.base_address, field_address, items_offset)
        except:
            # Roll back the header allocation and any children created so far.
            shared_memory.free(header_offset)
            for child_offset in child_offsets:
                shared_memory.destroy_obj(child_offset)

            raise

        return dict(mapped_items), header_offset, allocated_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> dict:
        """Rebuild a process-local dict from its shared-memory representation."""
        if ObjectType.tfastdict != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        field_address = offset + bs * len(BaseObjOffsets) + bs * FastDictOffsets.data_tuple_offset
        items_offset = read_uint64(shared_memory.base_address, field_address)
        return dict(shared_memory.get_obj(items_offset))

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the items tuple, then the dict header itself."""
        if ObjectType.tfastdict != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        field_address = offset + bs * len(BaseObjOffsets) + bs * FastDictOffsets.data_tuple_offset
        items_offset = read_uint64(shared_memory.base_address, field_address)
        shared_memory.destroy_obj(items_offset)
        shared_memory.free(offset)
# ======================================================================================================================
# === Set ==============================================================================================================


class SetOffsets(IntEnum):
    # ISet header layout, in machine words after the common object header.
    size = 0             # number of items
    capacity = 1         # number of hashmap slots
    hashmap_offset = 2   # shared-memory offset of the backing hashmap IList


class SetHashmapFieldTypes(IntEnum):
    # Tag stored in each hashmap slot.
    tnone = 0    # slot empty
    tobj = 1     # slot holds a single (hash, obj) entry inline
    tbucket = 2  # slot overflowed into a collision-bucket IList


class SetHashmapItemOffsets(IntEnum):
    # Per-slot record layout inside the hashmap IList (3 cells per slot).
    field_type = 0
    field_hash = 1
    obj_or_bucket = 2


class SetBucketOffsets(IntEnum):
    # Collision buckets store flat (hash, obj) pairs.
    field_hash = 0
    obj = 1


class ISet(BaseIObject, AbsSet):
    """Hash set stored in shared memory.

    Layout: a small header (SetOffsets) plus a hashmap IList of `capacity`
    slots; each slot is tagged (SetHashmapFieldTypes) and holds either one
    inline (hash, obj) entry or the offset of a collision-bucket IList.
    Slot selection masks the least-significant `hash_bits` bits of hash(item).
    """

    __slots__ = ('_shared_memory', '_base_address', '_obj_size', '_offset', '_offset__data', '_offset__size_offset', '_offset__capacity_offset', '_offset__hashmap_offset', '_load_factor', '_hash_bits', '_capacity', '_size', 'hashmap', 'hashmap_offset', 'buckets')

    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsSet = None) -> None:
        """Create a new shared set from `obj` (when offset is None) or attach to
        an existing one located at `offset`.
        """
        self._shared_memory = shared_memory
        self._base_address = shared_memory.base_address
        self._obj_size = None
        self._offset: Offset = None
        self._offset__data: Offset = None
        self._offset__size_offset: Offset = None
        self._offset__capacity_offset: Offset = None
        self._offset__hashmap_offset: Offset = None
        self._load_factor = 0.75
        self._hash_bits: int = None
        self._capacity: int = None
        self._size: int = None
        self.hashmap: IList = None
        self.hashmap_offset: Offset = None
        self.buckets: Dict[int, IList] = dict()

        if offset is None:
            if obj is None:
                data_len = 16
            else:
                data_len = len(obj)

            # NOTE(review): when obj is None this records a size of 16 for an
            # empty set — looks wrong (compare IMutableSet, which starts at 0);
            # confirm intent before relying on __len__ of an empty ISet.
            self._size: int = data_len
            self.hash_bits = 1
            self.capacity = int(ceil(data_len / self._load_factor))

            offset, self._obj_size = shared_memory.malloc(ObjectType.tset, bs * len(SetOffsets))
            try:
                self._offset = offset
                offset__data = offset + bs * len(BaseObjOffsets)
                self._offset__data = offset__data
                self._offset__size_offset: Offset = offset__data + bs * SetOffsets.size.value
                self._offset__capacity_offset: Offset = offset__data + bs * SetOffsets.capacity.value
                self._offset__hashmap_offset = offset__data + bs * SetOffsets.hashmap_offset.value

                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)

                # The hashmap is itself a shared-memory IList, pre-sized to
                # capacity * 3 cells and zero-filled (all slots tagged tnone).
                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
                self.hashmap = cast(IList, self.hashmap)
                self.hashmap_offset = hashmap_offset
                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
                hashmap_capacity = self.capacity * len(SetHashmapItemOffsets)
                self.hashmap.set_capacity(hashmap_capacity)
                self.hashmap.extend_with(hashmap_capacity, 0)
                hash_bits: int = self.hash_bits
                if obj is not None:
                    for item in obj:
                        item_hash = hash(item)
                        item_info_index: int = mask_least_significant_bits(item_hash, hash_bits) * len(SetHashmapItemOffsets)
                        field_type_index = item_info_index + SetHashmapItemOffsets.field_type.value
                        item_hash_index = item_info_index + SetHashmapItemOffsets.field_hash.value
                        item_bucket_index = item_info_index + SetHashmapItemOffsets.obj_or_bucket.value
                        field_type = self.hashmap[field_type_index]
                        if SetHashmapFieldTypes.tnone.value == field_type:
                            # Empty slot: store the entry inline.
                            self.hashmap[field_type_index] = SetHashmapFieldTypes.tobj.value
                            self.hashmap[item_hash_index] = item_hash
                            self.hashmap[item_bucket_index] = item
                        elif SetHashmapFieldTypes.tobj.value == field_type:
                            # First collision: spill the inline entry into a new
                            # bucket, then append the new entry to it.
                            bucket, bucket_offset, _ = shared_memory.put_obj(list())
                            bucket = cast(IList, bucket)
                            bucket.set_capacity(len(SetBucketOffsets))
                            bucket.extend_with(len(SetBucketOffsets), 0)
                            self.buckets[item_info_index] = bucket
                            self.hashmap.move_item_to_list(item_hash_index, bucket, SetBucketOffsets.field_hash.value)
                            self.hashmap.move_item_to_list(item_bucket_index, bucket, SetBucketOffsets.obj.value)
                            self.hashmap[field_type_index] = SetHashmapFieldTypes.tbucket.value
                            self.hashmap[item_bucket_index] = bucket_offset
                            bucket.append(item_hash)
                            bucket.append(item)
                        elif SetHashmapFieldTypes.tbucket.value == field_type:
                            # Subsequent collision: append the (hash, obj) pair.
                            bucket = self.buckets[item_info_index]
                            bucket.append(item_hash)
                            bucket.append(item)
                        else:
                            raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
            except:
                self._free_mem()
                raise
        else:
            # Attach to an existing ISet: read the header fields back and
            # re-open collision buckets as IList views.
            self._offset = offset
            offset__data = offset + bs * len(BaseObjOffsets)
            self._offset__data = offset__data
            self._offset__size_offset: Offset = offset__data + bs * SetOffsets.size
            self._offset__capacity_offset: Offset = offset__data + bs * SetOffsets.capacity
            self._offset__hashmap_offset = offset__data + bs * SetOffsets.hashmap_offset

            self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
            self.hash_bits = 1
            # NOTE(review): the capacity setter re-derives _hash_bits from the
            # stored capacity — confirm this round-trips to the same bit count
            # used when the set was created.
            self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
            hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)

            self.hashmap_offset = hashmap_offset
            self.hashmap = IList(shared_memory, hashmap_offset)
            item_info_index: int = 0

            for item_info_index in range(0, self.capacity * len(SetHashmapItemOffsets), len(SetHashmapItemOffsets)):
                field_type_index = item_info_index + SetHashmapItemOffsets.field_type.value
                item_hash_index = item_info_index + SetHashmapItemOffsets.field_hash.value
                item_bucket_index = item_info_index + SetHashmapItemOffsets.obj_or_bucket.value
                field_type = self.hashmap[field_type_index]
                if SetHashmapFieldTypes.tnone.value == field_type:
                    continue
                elif SetHashmapFieldTypes.tobj.value == field_type:
                    continue
                elif SetHashmapFieldTypes.tbucket.value == field_type:
                    bucket_offset = self.hashmap[item_bucket_index]
                    self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
                else:
                    raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def __len__(self):
        # Item count as recorded in the header at construction time.
        return self._size

    def __iter__(self):
        return ISetIterator(self)

    def __contains__(self, obj: Any) -> bool:
        """Hash lookup: inline-entry compare, or a scan of the slot's collision bucket."""
        item_hash = hash(obj)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(SetHashmapItemOffsets)
        field_type_index = item_info_index + SetHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + SetHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + SetHashmapItemOffsets.obj_or_bucket.value
        field_type = self.hashmap[field_type_index]
        if SetHashmapFieldTypes.tnone.value == field_type:
            return False
        elif SetHashmapFieldTypes.tobj.value == field_type:
            return (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index])
        elif SetHashmapFieldTypes.tbucket.value == field_type:
            bucket = self.buckets[item_info_index]
            # NOTE(review): this range treats len(bucket) as the PAIR count
            # (iterating len(bucket) * 2 cells), while ISetIterator.__next__
            # treats len(bucket) as the FLAT cell count — the two cannot both
            # be right; confirm IList.__len__ semantics for buckets.
            for sub_item_info_index in range(0, len(bucket) * len(SetBucketOffsets), len(SetBucketOffsets)):
                sub_item_hash_index = sub_item_info_index + SetBucketOffsets.field_hash.value
                sub_item_obj_index = sub_item_info_index + SetBucketOffsets.obj.value
                if (item_hash == bucket[sub_item_hash_index]) and (obj == bucket[sub_item_obj_index]):
                    return True

            return False
        else:
            raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def __hash__(self):
        # Delegates to a _hash() helper defined elsewhere (base class or another chunk).
        return self._hash()

    @property
    def hash_bits(self) -> int:
        # Number of least-significant hash bits used for slot selection.
        return self._hash_bits

    @hash_bits.setter
    def hash_bits(self, value: int) -> None:
        # Setting the bit count fixes capacity to the matching power of two.
        self._hash_bits = value
        self._capacity = 2 ** value

    @property
    def capacity(self) -> int:
        return self._capacity

    @capacity.setter
    def capacity(self, value: int) -> None:
        # Grow-only: derives hash_bits (and thus the real power-of-two
        # capacity) from the requested value.
        if value <= self._capacity:
            return

        if value <= 2:
            self.hash_bits = 1
        else:
            self.hash_bits = int(ceil(log2(value)))

    def __str__(self) -> str:
        return set(self).__str__()

    def __repr__(self) -> str:
        return set(self).__repr__()

    def _free_mem(self):
        """Release collision buckets, the hashmap and the header; guarded so a
        second call is a no-op."""
        if self._offset is not None:
            for _, bucket in self.buckets.items():
                self._shared_memory.destroy_obj(bucket._offset)

            self.buckets.clear()
            if self.hashmap_offset is not None:
                self._shared_memory.destroy_obj(self.hashmap_offset)
                self.hashmap_offset = None

            self._shared_memory.free(self._offset)
            self._offset = None


class ISetIterator:
    """Iterator over ISet: walks hashmap slots in order; inside a bucket slot,
    yields entry by entry before advancing to the next slot."""

    def __init__(self, iset: ISet) -> None:
        self._iset = iset
        self._index = 0       # current hashmap slot
        self._sub_index = 0   # current entry inside the slot's bucket

    def __next__(self):
        while self._index < self._iset.capacity:
            item_info_index: int = self._index * len(SetHashmapItemOffsets)
            field_type_index = item_info_index + SetHashmapItemOffsets.field_type.value
            item_hash_index = item_info_index + SetHashmapItemOffsets.field_hash.value
            item_bucket_index = item_info_index + SetHashmapItemOffsets.obj_or_bucket.value
            field_type = self._iset.hashmap[field_type_index]
            if SetHashmapFieldTypes.tnone.value == field_type:
                self._index += 1
                continue
            elif SetHashmapFieldTypes.tobj.value == field_type:
                result = self._iset.hashmap[item_bucket_index]
                self._index += 1
                break
            elif SetHashmapFieldTypes.tbucket.value == field_type:
                bucket = self._iset.buckets[item_info_index]
                sub_item_info_index = self._sub_index
                sub_item_hash_index = sub_item_info_index * len(SetBucketOffsets) + SetBucketOffsets.field_hash.value
                sub_item_obj_index = sub_item_info_index * len(SetBucketOffsets) + SetBucketOffsets.obj.value
                # Bucket exhausted: move on to the next hashmap slot.
                if (sub_item_info_index * len(SetBucketOffsets)) >= len(bucket):
                    self._sub_index = 0
                    self._index += 1
                    continue

                result = bucket[sub_item_obj_index]
                self._sub_index += 1
                break
            else:
                raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        else:
            # while-loop ran out of slots without breaking: iteration is done.
            raise StopIteration

        return result

    def __iter__(self):
        return self


class TSet:
    """Codec: maps a Python set to/from its shared-memory ISet representation."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: set) -> Tuple[AbsSet, Offset, Size]:
        obj: ISet = ISet(shared_memory, obj=obj)
        return obj, obj._offset, obj._obj_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> ISet:
        if ObjectType.tset != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        return ISet(shared_memory, offset)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        if ObjectType.tset != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        obj: ISet = ISet(shared_memory, offset)
        obj._free_mem()


# ======================================================================================================================
# === MutableSet =======================================================================================================


class MutableSetOffsets(IntEnum):
    # IMutableSet header layout (machine words after the common object header).
    size = 0
    capacity = 1
    hashmap_offset = 2
    refresh_counter = 3  # presumably lets attached views detect structural changes — confirm


class MutableSetHashmapFieldTypes(IntEnum):
    tnone = 0
    tobj = 1
    tbucket = 2


class MutableSetHashmapItemOffsets(IntEnum):
    field_type = 0
    field_hash = 1
    obj_or_bucket = 2


class MutableSetBucketFieldTypes(IntEnum):
    # Per-entry tag inside mutable buckets (tnone presumably marks removed entries — confirm).
    tnone = 0
    tobj = 1


class MutableSetBucketOffsets(IntEnum):
    # Mutable buckets carry an extra per-entry type tag, unlike ISet's 2-cell buckets.
    field_type = 0
    field_hash = 1
    obj = 2


class IMutableSet(BaseIObject, AbsMutableSet):
    # NOTE(review): __init__ (continuing past this chunk) also assigns
    # `_offset__refresh_counter_offset`, which is missing from __slots__ —
    # this only works if a base class provides __dict__; confirm.
    __slots__ = ('_shared_memory', '_base_address', '_obj_size', '_offset', '_offset__data', '_offset__size_offset', '_offset__capacity_offset', '_offset__hashmap_offset', '_load_factor', '_load_factor_2', '_hash_bits', '_capacity', '_min_capacity', '_size', 'hashmap', '_refresh_counter', 'hashmap_offset', 'buckets', 'ignore_rehash')

    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMutableSet = None) -> None:
        self._shared_memory = shared_memory
        self._base_address = shared_memory.base_address
        self._obj_size = None
        self._offset: Offset = None
        self._offset__data: Offset = None
        self._offset__size_offset: Offset = None
        self._offset__capacity_offset: Offset = None
        self._offset__hashmap_offset: Offset = None
self._offset__refresh_counter_offset: Offset = None 2797 self._load_factor = 0.75 2798 self._load_factor_2 = 0.5625 2799 self._hash_bits: int = None 2800 self._capacity: int = None 2801 self._min_capacity: int = None 2802 self._size: int = None 2803 self.hashmap: IList = None 2804 self._refresh_counter: int = 0 2805 self.hashmap_offset: Offset = None 2806 self.buckets: Dict[int, IList] = dict() 2807 2808 self.ignore_rehash: bool = True 2809 2810 if offset is None: 2811 if obj is None: 2812 # obj = frozenset(set()) 2813 data_len = 16 2814 else: 2815 data_len = len(obj) 2816 2817 self._size = 0 2818 self.hash_bits = 1 2819 self.capacity = int(ceil(data_len / self._load_factor)) 2820 self._min_capacity = int(ceil(self._capacity * self._load_factor_2)) 2821 2822 offset, self._obj_size = shared_memory.malloc(ObjectType.tmutableset, bs * len(MutableSetOffsets)) 2823 try: 2824 self._offset = offset 2825 offset__data = offset + bs * len(BaseObjOffsets) 2826 self._offset__data = offset__data 2827 self._offset__size_offset: Offset = offset__data + bs * MutableSetOffsets.size.value 2828 self._offset__capacity_offset: Offset = offset__data + bs * MutableSetOffsets.capacity.value 2829 self._offset__hashmap_offset = offset__data + bs * MutableSetOffsets.hashmap_offset.value 2830 self._offset__refresh_counter_offset = offset__data + bs * MutableSetOffsets.refresh_counter.value 2831 2832 write_uint64(shared_memory.base_address, self._offset__size_offset, self._size) 2833 write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity) 2834 write_uint64(shared_memory.base_address, self._offset__refresh_counter_offset, self._refresh_counter) 2835 2836 self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list()) 2837 self.hashmap = cast(IList, self.hashmap) 2838 self.hashmap_offset = hashmap_offset 2839 write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset) 2840 hashmap_capacity = self.capacity * 
len(MutableSetHashmapItemOffsets) 2841 self.hashmap.set_capacity(hashmap_capacity) 2842 self.hashmap.extend_with(hashmap_capacity, 0) 2843 hash_bits: int = self.hash_bits 2844 if obj is None: 2845 pass 2846 elif isinstance(obj, IMutableSet): 2847 self._move_from(obj) 2848 else: 2849 for item in obj: 2850 self.add(item) 2851 2852 self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset) 2853 2854 self.ignore_rehash = False 2855 except: 2856 self._free_mem() 2857 raise 2858 else: 2859 self._refresh_hashmap(offset) 2860 self.ignore_rehash = False 2861 2862 # self._offset = offset 2863 # offset__data = offset + bs * len(BaseObjOffsets) 2864 # self._offset__data = offset__data 2865 # self._offset__size_offset: Offset = offset__data + bs * MutableSetOffsets.size 2866 # self._offset__capacity_offset: Offset = offset__data + bs * MutableSetOffsets.capacity 2867 # self._offset__hashmap_offset = offset__data + bs * MutableSetOffsets.hashmap_offset 2868 2869 # self._size = read_uint64(shared_memory.base_address, self._offset__size_offset) 2870 # self.hash_bits = 1 2871 # self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset) 2872 # hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset) 2873 # self._min_capacity = int(ceil(self._capacity * self._load_factor_2)) 2874 2875 # self.hashmap_offset = hashmap_offset 2876 # self.hashmap = IList(shared_memory, hashmap_offset) 2877 # item_info_index: int = 0 2878 # # for item_info_index in range(self.capacity): 2879 # # field_type_index = item_info_index * len(MutableSetHashmapItemOffsets) + MutableSetHashmapItemOffsets.field_type.value 2880 # # item_hash_index = item_info_index * len(MutableSetHashmapItemOffsets) + MutableSetHashmapItemOffsets.field_hash.value 2881 # # item_bucket_index = item_info_index * len(MutableSetHashmapItemOffsets) + MutableSetHashmapItemOffsets.obj_or_bucket.value 2882 # # field_type = 
self.hashmap[field_type_index] 2883 # # if MutableSetHashmapFieldTypes.tnone.value == field_type: 2884 # # continue 2885 # # elif MutableSetHashmapFieldTypes.tobj.value == field_type: 2886 # # continue 2887 # # elif MutableSetHashmapFieldTypes.tbucket.value == field_type: 2888 # # bucket_offset = self.hashmap[item_bucket_index] 2889 # # self.buckets[item_info_index] = IList(shared_memory, bucket_offset) 2890 # # else: 2891 # # raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}') 2892 2893 # for item_info_index in range(0, self.capacity * len(MutableSetHashmapItemOffsets), len(MutableSetHashmapItemOffsets)): 2894 # field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value 2895 # item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value 2896 # item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value 2897 # field_type = self.hashmap[field_type_index] 2898 # if MutableSetHashmapFieldTypes.tnone.value == field_type: 2899 # continue 2900 # elif MutableSetHashmapFieldTypes.tobj.value == field_type: 2901 # continue 2902 # elif MutableSetHashmapFieldTypes.tbucket.value == field_type: 2903 # bucket_offset = self.hashmap[item_bucket_index] 2904 # self.buckets[item_info_index] = IList(shared_memory, bucket_offset) 2905 # else: 2906 # raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}') 2907 2908 # self.ignore_rehash = False 2909 2910 def _refresh_hashmap(self, offset: Offset): 2911 # ignore_rehash = self.ignore_rehash 2912 # self.ignore_rehash = True 2913 2914 self._hash_bits = None 2915 self._capacity = None 2916 self._min_capacity = None 2917 self._size = None 2918 self.hashmap = None 2919 self._refresh_counter = 0 2920 self.hashmap_offset = None 2921 self.buckets = dict() 2922 2923 shared_memory = self._shared_memory 2924 self._offset = offset 2925 offset__data = offset + bs * len(BaseObjOffsets) 
2926 self._offset__data = offset__data 2927 self._offset__size_offset: Offset = offset__data + bs * MutableSetOffsets.size 2928 self._offset__capacity_offset: Offset = offset__data + bs * MutableSetOffsets.capacity 2929 self._offset__hashmap_offset = offset__data + bs * MutableSetOffsets.hashmap_offset 2930 self._offset__refresh_counter_offset = offset__data + bs * MutableSetOffsets.refresh_counter.value 2931 2932 self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset) 2933 self._size = read_uint64(shared_memory.base_address, self._offset__size_offset) 2934 self.hash_bits = 1 2935 self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset) 2936 hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset) 2937 self._min_capacity = int(ceil(self._capacity * self._load_factor_2)) 2938 2939 self.hashmap_offset = hashmap_offset 2940 self.hashmap = IList(shared_memory, hashmap_offset) 2941 item_info_index: int = 0 2942 # for item_info_index in range(self.capacity): 2943 # field_type_index = item_info_index * len(MutableSetHashmapItemOffsets) + MutableSetHashmapItemOffsets.field_type.value 2944 # item_hash_index = item_info_index * len(MutableSetHashmapItemOffsets) + MutableSetHashmapItemOffsets.field_hash.value 2945 # item_bucket_index = item_info_index * len(MutableSetHashmapItemOffsets) + MutableSetHashmapItemOffsets.obj_or_bucket.value 2946 # field_type = self.hashmap[field_type_index] 2947 # if MutableSetHashmapFieldTypes.tnone.value == field_type: 2948 # continue 2949 # elif MutableSetHashmapFieldTypes.tobj.value == field_type: 2950 # continue 2951 # elif MutableSetHashmapFieldTypes.tbucket.value == field_type: 2952 # bucket_offset = self.hashmap[item_bucket_index] 2953 # self.buckets[item_info_index] = IList(shared_memory, bucket_offset) 2954 # else: 2955 # raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}') 2956 
2957 for item_info_index in range(0, self.capacity * len(MutableSetHashmapItemOffsets), len(MutableSetHashmapItemOffsets)): 2958 field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value 2959 item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value 2960 item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value 2961 field_type = self.hashmap[field_type_index] 2962 if MutableSetHashmapFieldTypes.tnone.value == field_type: 2963 continue 2964 elif MutableSetHashmapFieldTypes.tobj.value == field_type: 2965 continue 2966 elif MutableSetHashmapFieldTypes.tbucket.value == field_type: 2967 bucket_offset = self.hashmap[item_bucket_index] 2968 self.buckets[item_info_index] = IList(shared_memory, bucket_offset) 2969 else: 2970 raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}') 2971 2972 # self.ignore_rehash = ignore_rehash 2973 2974 @property 2975 def refresh_counter(self): 2976 return read_uint64(self._base_address, self._offset__refresh_counter_offset) 2977 2978 def _increase_refresh_counter(self): 2979 if not self.ignore_rehash: 2980 self._refresh_counter += 1 2981 write_uint64(self._base_address, self._offset__refresh_counter_offset, self._refresh_counter) 2982 2983 def _check_hashmap(self): 2984 if self.ignore_rehash: 2985 return False 2986 else: 2987 base_address = self._base_address 2988 refresh_counter = read_uint64(base_address, self._offset__refresh_counter_offset) 2989 # hashmap_offset = read_uint64(base_address, self._offset__hashmap_offset) 2990 # if (self._refresh_counter != refresh_counter) or (self.hashmap_offset != hashmap_offset) or (self._hashmap._offset != hashmap_offset): 2991 if self._refresh_counter != refresh_counter: 2992 self._refresh_hashmap(self._offset) 2993 return True 2994 2995 return False 2996 2997 # @property 2998 # def hashmap(self) -> IList: 2999 # if self.ignore_rehash: 3000 # return self._hashmap 3001 # else: 
    def _increase_size(self):
        # Bump the element count and persist it to shared memory; rehash when
        # the count leaves the [min_capacity, capacity] band.
        self._size += 1
        write_uint64(self._base_address, self._offset__size_offset, self._size)
        if (self._size > self._capacity) or (self._size < self._min_capacity):
            self._rehash()

    def _decrease_size(self):
        # Decrement the element count, persist it, and rehash when the count
        # leaves the [min_capacity, capacity] band.
        self._size -= 1
        if self._size < 0:
            raise RuntimeError('Size of the set is negative')

        write_uint64(self._base_address, self._offset__size_offset, self._size)
        if (self._size > self._capacity) or (self._size < self._min_capacity):
            self._rehash()

    def _move_from(self, other: 'IMutableSet'):
        # Drain `other` (its entries are popped as they are yielded) and adopt
        # its stored values by (hash, type, offset) — no re-serialization.
        for value_hash, value_type, value_offset in other.iter_offset_pop():
            self.add_as_offset(value_hash, value_type, value_offset)

    def _rehash(self):
        """Rebuild the hashmap at a capacity suited to the current size.

        A fresh copy of this set is serialized into shared memory via
        `put_obj(self)` (which sizes its new hashmap from the current element
        count); then the internals of the copy and of `self` are swapped —
        both the Python-side cached fields and the binary header fields — and
        the copy (now owning the obsolete hashmap/buckets) is destroyed.
        """
        if self.ignore_rehash:
            return

        self._increase_refresh_counter()

        # Guard against recursive rehashing while items are re-inserted.
        ignore_rehash = self.ignore_rehash
        self.ignore_rehash = True

        new_other, new_other_offset, new_other_size = self._shared_memory.put_obj(self)
        new_other = cast(IMutableSet, new_other)

        # Snapshot the freshly built set's state: the Python-side fields and
        # the raw header values as stored in shared memory.
        other_capacity = new_other._capacity
        other_hash_bits = new_other._hash_bits
        other_min_capacity = new_other._min_capacity
        other_size = new_other._size
        # other_refresh_counter = new_other._refresh_counter
        other_hashmap = new_other.hashmap
        other_hashmap_offset = new_other.hashmap_offset
        other_buckets = new_other.buckets
        other_hashmap_offset_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset)
        other_size_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset)
        other_capacity_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__capacity_offset)
        # other_refresh_counter_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__refresh_counter_offset)

        # Hand the copy this set's OLD state, so destroying the copy below
        # frees the old hashmap and buckets.
        new_other._capacity = self._capacity
        new_other._hash_bits = self._hash_bits
        new_other._min_capacity = self._min_capacity
        new_other._size = self._size
        # new_other._refresh_counter = self._refresh_counter
        new_other.hashmap = self.hashmap
        new_other.hashmap_offset = self.hashmap_offset
        new_other.buckets = self.buckets
        write_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset, read_uint64(self._base_address, self._offset__hashmap_offset))
        write_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset, read_uint64(self._base_address, self._offset__size_offset))
        write_uint64(new_other._shared_memory.base_address, new_other._offset__capacity_offset, read_uint64(self._base_address, self._offset__capacity_offset))
        # write_uint64(new_other._shared_memory.base_address, new_other._offset__refresh_counter_offset, read_uint64(self._base_address, self._offset__refresh_counter_offset))

        # Adopt the rebuilt state into `self`.
        self._capacity = other_capacity
        self._hash_bits = other_hash_bits
        self._min_capacity = other_min_capacity
        self._size = other_size
        # self._refresh_counter = other_refresh_counter
        self.hashmap = other_hashmap
        self.hashmap_offset = other_hashmap_offset
        self.buckets = other_buckets
        write_uint64(self._base_address, self._offset__hashmap_offset, other_hashmap_offset_bin)
        write_uint64(self._base_address, self._offset__size_offset, other_size_bin)
        write_uint64(self._base_address, self._offset__capacity_offset, other_capacity_bin)
        # write_uint64(self._base_address, self._offset__refresh_counter_offset, other_refresh_counter_bin)

        self._shared_memory.destroy_obj(new_other_offset)

        self.ignore_rehash = ignore_rehash

    def __len__(self):
        self._check_hashmap()
        return self._size

    def __iter__(self):
        # Yields the deserialized items themselves.
        self._check_hashmap()
        return IMutableSetIterator(self)

    def iter_offset(self):
        # Yields (hash, value_type, value_offset) triples without popping.
        self._check_hashmap()
        return IMutableSetIteratorAsOffset(self)

    def iter_offset_pop(self):
        # Yields (hash, value_type, value_offset) triples, removing each
        # entry from the hashmap/bucket as it is yielded.
        self._check_hashmap()
        return IMutableSetIteratorAsOffset(self, True)

    def __contains__(self, obj: Any) -> bool:
        self._check_hashmap()
        item_hash = hash(obj)
        # Slot index: the low `hash_bits` bits of the hash select a hashmap
        # slot; each slot spans len(MutableSetHashmapItemOffsets) cells.
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MutableSetHashmapItemOffsets)
        field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value
        field_type = self.hashmap[field_type_index]
        if MutableSetHashmapFieldTypes.tnone.value == field_type:
            return False
        elif MutableSetHashmapFieldTypes.tobj.value == field_type:
            # Single inline object: compare hash first, then full equality.
            return (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index])
        elif MutableSetHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError  # cached proxy is stale
            except KeyError:
                raise
                # NOTE(review): unreachable after the `raise` above — this
                # rebuild of the cached bucket proxy looks like the intended
                # recovery path; the `raise` reads as leftover fail-fast
                # debugging. Same pattern repeats in add/add_as_offset/discard
                # and both iterators. TODO confirm with the author.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # Linear scan of the collision bucket; each entry spans
            # len(MutableSetBucketOffsets) cells.
            for bucket_item_index in range(0, len(bucket), len(MutableSetBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value]
                if MutableSetBucketFieldTypes.tnone.value == bucket_field_type:
                    continue

                bucket_field_hash = bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value]
                bucket_obj = bucket[bucket_item_index + MutableSetBucketOffsets.obj.value]
                if (item_hash == bucket_field_hash) and (obj == bucket_obj):
                    return True

            return False
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def add(self, value):
        """Add an element."""
        self._check_hashmap()
        item = value
        item_hash = hash(item)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MutableSetHashmapItemOffsets)
        field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value
        field_type = self.hashmap[field_type_index]
        if MutableSetHashmapFieldTypes.tnone.value == field_type:
            # Empty slot: store the object inline in the hashmap.
            self.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tobj.value
            self.hashmap[item_hash_index] = item_hash
            self.hashmap[item_bucket_index] = item
            self._increase_size()
            return
        elif MutableSetHashmapFieldTypes.tobj.value == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (item == self.hashmap[item_bucket_index]):
                return  # already present

            # Collision with a different inline object: promote the slot to a
            # bucket, move the existing entry in, then append the new one.
            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(len(MutableSetBucketOffsets))
            bucket.extend_with(len(MutableSetBucketOffsets), 0)
            self.buckets[item_info_index] = bucket
            bucket[MutableSetBucketOffsets.field_type.value] = MutableSetBucketFieldTypes.tobj.value
            # move_item_to_list transfers ownership of the stored value from
            # the hashmap cell into the bucket cell.
            self.hashmap.move_item_to_list(item_hash_index, bucket, MutableSetBucketOffsets.field_hash.value)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, MutableSetBucketOffsets.obj.value)
            self.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tbucket.value
            self.hashmap[item_bucket_index] = bucket_offset
            bucket.append(MutableSetBucketFieldTypes.tobj.value)
            bucket.append(item_hash)
            bucket.append(item)
            self._increase_size()
            return
        elif MutableSetHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError  # cached proxy is stale
            except KeyError:
                raise
                # NOTE(review): unreachable after the `raise` above — see the
                # matching note in __contains__.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # First pass: bail out if the item is already in the bucket.
            bucket_len: int = len(bucket)
            for bucket_item_index in range(0, bucket_len, len(MutableSetBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value]
                if MutableSetBucketFieldTypes.tobj.value == bucket_field_type:
                    if (item_hash == bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value]) and (item == bucket[bucket_item_index + MutableSetBucketOffsets.obj.value]):
                        return

            # Second pass: reuse the first emptied (tnone) entry if any.
            for bucket_item_index in range(0, bucket_len, len(MutableSetBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value]
                if MutableSetBucketFieldTypes.tnone.value == bucket_field_type:
                    bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value] = MutableSetBucketFieldTypes.tobj.value
                    bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value] = item_hash
                    bucket[bucket_item_index + MutableSetBucketOffsets.obj.value] = item
                    self._increase_size()
                    return
            else:
                # No hole found: grow the bucket with a new entry.
                bucket.append(MutableSetBucketFieldTypes.tobj.value)
                bucket.append(item_hash)
                bucket.append(item)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def add_as_offset(self, value_hash, value_type, value_offset):
        """Add an element.

        Offset-based variant of `add`: the value is given as a pre-computed
        hash plus an already-serialized (type, offset) pair; ownership of the
        stored value is transferred via the *_as_offset accessors instead of
        re-serializing it.
        """
        self._check_hashmap()
        item = (value_type, value_offset)
        item_hash = value_hash
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MutableSetHashmapItemOffsets)
        field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value
        field_type = self.hashmap[field_type_index]
        if MutableSetHashmapFieldTypes.tnone.value == field_type:
            self.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tobj.value
            self.hashmap[item_hash_index] = item_hash
            self.hashmap.setitem_as_offset(item_bucket_index, item)
            self._increase_size()
            return
        elif MutableSetHashmapFieldTypes.tobj.value == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (item == self.hashmap.getitem_as_offset(item_bucket_index)):
                return  # already present (same stored type+offset)

            # Collision: promote the inline slot to a bucket (see `add`).
            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(len(MutableSetBucketOffsets))
            bucket.extend_with(len(MutableSetBucketOffsets), 0)
            self.buckets[item_info_index] = bucket
            bucket[MutableSetBucketOffsets.field_type.value] = MutableSetBucketFieldTypes.tobj.value
            self.hashmap.move_item_to_list(item_hash_index, bucket, MutableSetBucketOffsets.field_hash.value)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, MutableSetBucketOffsets.obj.value)
            self.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tbucket.value
            self.hashmap[item_bucket_index] = bucket_offset
            bucket.append(MutableSetBucketFieldTypes.tobj.value)
            bucket.append(item_hash)
            bucket.append_as_offset(item)
            self._increase_size()
            return
        elif MutableSetHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError  # cached proxy is stale
            except KeyError:
                raise
                # NOTE(review): unreachable after the `raise` above — see the
                # matching note in __contains__.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # First pass: already present?
            bucket_len: int = len(bucket)
            for bucket_item_index in range(0, bucket_len, len(MutableSetBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value]
                if MutableSetBucketFieldTypes.tobj.value == bucket_field_type:
                    if (item_hash == bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value]) and (item == bucket.getitem_as_offset(bucket_item_index + MutableSetBucketOffsets.obj.value)):
                        return

            # Second pass: reuse a hole, else append.
            for bucket_item_index in range(0, bucket_len, len(MutableSetBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value]
                if MutableSetBucketFieldTypes.tnone.value == bucket_field_type:
                    bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value] = MutableSetBucketFieldTypes.tobj.value
                    bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value] = item_hash
                    bucket.setitem_as_offset(bucket_item_index + MutableSetBucketOffsets.obj.value, item)
                    self._increase_size()
                    return
            else:
                bucket.append(MutableSetBucketFieldTypes.tobj.value)
                bucket.append(item_hash)
                bucket.append_as_offset(item)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def discard(self, value):
        """Remove an element. Do not raise an exception if absent."""
        self._check_hashmap()
        obj = value
        item_hash = hash(obj)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MutableSetHashmapItemOffsets)
        field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value
        field_type = self.hashmap[field_type_index]
        if MutableSetHashmapFieldTypes.tnone.value == field_type:
            return
        elif MutableSetHashmapFieldTypes.tobj.value == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index]):
                # Clear the inline slot back to tnone.
                self.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tnone.value
                self.hashmap[item_hash_index] = None
                self.hashmap[item_bucket_index] = None
                self._decrease_size()
                return
            else:
                return
        elif MutableSetHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError  # cached proxy is stale
            except KeyError:
                raise
                # NOTE(review): unreachable after the `raise` above — see the
                # matching note in __contains__.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            for bucket_item_index in range(0, len(bucket), len(MutableSetBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value]
                if MutableSetBucketFieldTypes.tnone.value == bucket_field_type:
                    continue

                bucket_field_hash = bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value]
                bucket_obj = bucket[bucket_item_index + MutableSetBucketOffsets.obj.value]
                if (item_hash == bucket_field_hash) and (obj == bucket_obj):
                    # Leave a tnone hole; holes are reused by `add`.
                    bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value] = MutableSetBucketFieldTypes.tnone.value
                    bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value] = None
                    bucket[bucket_item_index + MutableSetBucketOffsets.obj.value] = None
                    self._decrease_size()
                    return
            return
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    @property
    def hash_bits(self) -> int:
        # Number of low hash bits used to select a hashmap slot.
        return self._hash_bits

    @hash_bits.setter
    def hash_bits(self, value: int) -> None:
        # Capacity is always a power of two derived from the bit count.
        self._hash_bits = value
        self._capacity = 2 ** value

    @property
    def capacity(self) -> int:
        return self._capacity

    @capacity.setter
    def capacity(self, value: int) -> None:
        # Grow-only: requests that do not exceed the current capacity are
        # ignored; otherwise round up to the next power of two via hash_bits.
        if value <= self._capacity:
            return

        if value <= 2:
            self.hash_bits = 1
        else:
            self.hash_bits = int(ceil(log2(value)))

    def __str__(self) -> str:
        # Materializes the whole set into a Python `set` for display.
        self._check_hashmap()
        return set(self).__str__()

    def __repr__(self) -> str:
        # Materializes the whole set into a Python `set` for display.
        self._check_hashmap()
        return set(self).__repr__()

    def _free_mem(self):
        # Release everything this set owns in shared memory: collision
        # buckets, the hashmap list, then the set's own header allocation.
        if self._offset is not None:
            for _, bucket in self.buckets.items():
                self._shared_memory.destroy_obj(bucket._offset)

            self.buckets.clear()
            if self.hashmap_offset is not None:
                self._shared_memory.destroy_obj(self.hashmap_offset)
                self.hashmap_offset = None

            self._shared_memory.free(self._offset)
            self._offset = None


class IMutableSetIterator:
    """Iterator over an IMutableSet yielding the deserialized items.

    `_index` walks hashmap slots; `_sub_index` walks entries inside the
    current slot's collision bucket. Raises RuntimeError if the set's hashmap
    is refreshed (changed by another handle) mid-iteration.
    """

    def __init__(self, iset: IMutableSet) -> None:
        self._iset = iset
        self._index = 0       # current hashmap slot
        self._sub_index = 0   # current entry within the slot's bucket

    def __next__(self):
        if self._iset._check_hashmap():
            raise RuntimeError("Sets's hashmap changed during iteration")

        while self._index < self._iset.capacity:
            item_info_index: int = self._index * len(MutableSetHashmapItemOffsets)
            field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value
            item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value
            item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value
            field_type = self._iset.hashmap[field_type_index]
            if MutableSetHashmapFieldTypes.tnone.value == field_type:
                self._index += 1
                continue
            elif MutableSetHashmapFieldTypes.tobj.value == field_type:
                result = self._iset.hashmap[item_bucket_index]
                self._index += 1
                return result
            elif MutableSetHashmapFieldTypes.tbucket.value == field_type:
                bucket_offset = self._iset.hashmap[item_bucket_index]
                try:
                    bucket = self._iset.buckets[item_info_index]
                    if bucket._offset != bucket_offset:
                        raise KeyError  # cached proxy is stale
                except KeyError:
                    raise
                    # NOTE(review): unreachable after the `raise` above — see
                    # the matching note in IMutableSet.__contains__.
                    self._iset.buckets[item_info_index] = bucket = IList(self._iset._shared_memory, bucket_offset)

                bucket_len = len(bucket)
                sub_item_info_index = self._sub_index
                while (sub_item_info_index * len(MutableSetBucketOffsets)) < bucket_len:
                    sub_item_field_type_index = sub_item_info_index * len(MutableSetBucketOffsets) + MutableSetBucketOffsets.field_type.value
                    if bucket[sub_item_field_type_index] == MutableSetBucketFieldTypes.tnone.value:
                        sub_item_info_index += 1
                        continue

                    sub_item_hash_index = sub_item_info_index * len(MutableSetBucketOffsets) + MutableSetBucketOffsets.field_hash.value
                    sub_item_obj_index = sub_item_info_index * len(MutableSetBucketOffsets) + MutableSetBucketOffsets.obj.value
                    result = bucket[sub_item_obj_index]
                    # NOTE(review): tnone holes are skipped via the *local*
                    # counter, but only +1 is persisted here — after a hole,
                    # self._sub_index lags sub_item_info_index and the same
                    # bucket entry can be yielded again on the next call.
                    # Presumably this should be
                    # `self._sub_index = sub_item_info_index + 1`. TODO confirm.
                    self._sub_index += 1
                    return result
                else:
                    # Bucket exhausted: move to the next hashmap slot.
                    self._sub_index = 0
                    self._index += 1
                    continue
            else:
                raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        else:
            raise StopIteration

    def __iter__(self):
        return self


class IMutableSetIteratorAsOffset:
    """Iterator yielding (hash, value_type, value_offset) triples.

    With ``pop=True`` each yielded entry is removed from the hashmap/bucket
    as it is produced (used by IMutableSet._move_from during rehashing).
    """

    def __init__(self, iset: IMutableSet, pop: bool = False) -> None:
        self._iset = iset
        self._pop: bool = pop
        self._index = 0       # current hashmap slot
        self._sub_index = 0   # current entry within the slot's bucket

    def __next__(self):
        if self._iset._check_hashmap():
            raise RuntimeError("Set's hashmap changed during iteration")

        if self._index < self._iset.capacity:
            while self._index < self._iset.capacity:
                item_info_index: int = self._index * len(MutableSetHashmapItemOffsets)
                field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value
                item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value
                item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value
                field_type = self._iset.hashmap[field_type_index]
                if MutableSetHashmapFieldTypes.tnone.value == field_type:
                    self._index += 1
                    continue
                elif MutableSetHashmapFieldTypes.tobj.value == field_type:
                    item_hash = self._iset.hashmap[item_hash_index]
                    value_type, value_offset = self._iset.hashmap.getitem_as_offset(item_bucket_index)
                    if self._pop:
                        # Clear the slot; the stored value's ownership passes
                        # to the caller via the returned offset.
                        self._iset.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tnone.value
                        self._iset.hashmap[item_hash_index] = None
                        self._iset.hashmap.setitem_as_offset(item_bucket_index, (InternalListFieldTypes.tnone.value, 0), False)

                    self._index += 1
                    return (item_hash, value_type, value_offset)
                elif MutableSetHashmapFieldTypes.tbucket.value == field_type:
                    bucket_offset = self._iset.hashmap[item_bucket_index]
                    try:
                        bucket = self._iset.buckets[item_info_index]
                        if bucket._offset != bucket_offset:
                            raise KeyError  # cached proxy is stale
                    except KeyError:
                        raise
                        # NOTE(review): unreachable after the `raise` above —
                        # see the matching note in IMutableSet.__contains__.
                        self._iset.buckets[item_info_index] = bucket = IList(self._iset._shared_memory, bucket_offset)

                    bucket_len = len(bucket)
                    sub_item_info_index = self._sub_index
                    while (sub_item_info_index * len(MutableSetBucketOffsets)) < bucket_len:
                        sub_item_field_type_index = sub_item_info_index * len(MutableSetBucketOffsets) + MutableSetBucketOffsets.field_type.value
                        if bucket[sub_item_field_type_index] == MutableSetBucketFieldTypes.tnone.value:
                            sub_item_info_index += 1
                            continue

                        sub_item_hash_index = sub_item_info_index * len(MutableSetBucketOffsets) + MutableSetBucketOffsets.field_hash.value
                        sub_item_obj_index = sub_item_info_index * len(MutableSetBucketOffsets) + MutableSetBucketOffsets.obj.value
                        sub_item_hash = bucket[sub_item_hash_index]
                        sub_item_value_type, sub_item_value_offset = bucket.getitem_as_offset(sub_item_obj_index)
                        if self._pop:
                            # NOTE(review): writes MutableSetHashmapFieldTypes
                            # into a *bucket* field-type cell (elsewhere
                            # MutableSetBucketFieldTypes is used) — works only
                            # if both enums share the tnone value. TODO confirm.
                            bucket[sub_item_field_type_index] = MutableSetHashmapFieldTypes.tnone.value
                            bucket[sub_item_hash_index] = None
                            bucket.setitem_as_offset(sub_item_obj_index, (InternalListFieldTypes.tnone.value, 0), False)

                        # NOTE(review): same +1-after-local-skip pattern as in
                        # IMutableSetIterator.__next__ — in non-pop mode an
                        # entry after a tnone hole can be yielded repeatedly.
                        self._sub_index += 1
                        return (sub_item_hash, sub_item_value_type, sub_item_value_offset)
                    else:
                        # Bucket exhausted: move to the next hashmap slot.
                        self._sub_index = 0
                        self._index += 1
                        continue
                else:
                    raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
            else:
                raise StopIteration
        else:
            raise StopIteration

    def __iter__(self):
        return self


class TMutableSet:
    """Codec mapping Python `set` objects to shared-memory IMutableSet objects."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: set) -> Tuple[IMutableSet, Offset, Size]:
        # Serialize `obj` into shared memory; returns (proxy, offset, size).
        obj: IMutableSet = IMutableSet(shared_memory, obj=obj)
        return obj, obj._offset, obj._obj_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> IMutableSet:
        # Attach a proxy to an already-stored set after a type-tag check.
        if ObjectType.tmutableset != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        return IMutableSet(shared_memory, offset)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        # Type-check, attach, and release all shared memory owned by the set.
        if ObjectType.tmutableset != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        obj: IMutableSet = IMutableSet(shared_memory, offset)
        obj._free_mem()


# 
# ======================================================================================================================
# === Mapping ==========================================================================================================


class ForceMapping(dict):
    """Marker subclass of ``dict``: forces storage as an (immutable) IMapping in shared memory."""
    ...


# Convenience aliases for ForceMapping.
FMapping = ForceMapping
forcemapping = ForceMapping
fmapping = ForceMapping


class MappingOffsets(IntEnum):
    # Header layout (in machine words, after the common object header) of an IMapping.
    size = 0
    capacity = 1
    hashmap_offset = 2


class MappingHashmapFieldTypes(IntEnum):
    # Discriminator for one hashmap slot: empty, a single key/value pair, or a collision bucket.
    tnone = 0
    tobj = 1
    tbucket = 2


class MappingHashmapItemOffsets(IntEnum):
    # Field layout of one hashmap slot (len() of this enum == items per slot).
    field_type = 0
    field_hash = 1
    key_or_bucket = 2   # the key itself (tobj) or the bucket IList offset (tbucket)
    value_or_none = 3


class MappingBucketOffsets(IntEnum):
    # Field layout of one entry inside a collision bucket IList.
    field_hash = 0
    key_obj = 1
    value_obj = 2


class IMapping(BaseIObject, AbsMapping):
    """Read-only mapping stored in shared memory.

    Layout: a header (MappingOffsets) plus an IList used as an open hashmap of
    ``capacity`` slots; hash collisions spill into per-slot bucket ILists.
    Constructed either from a live mapping ``obj`` (allocates and fills shared
    memory) or adopted from an existing shared-memory ``offset``.
    """

    __slots__ = ('_shared_memory', '_base_address', '_obj_size', '_offset', '_offset__data', '_offset__size_offset', '_offset__capacity_offset', '_offset__hashmap_offset', '_offset__refresh_counter_offset', '_load_factor', '_load_factor_2', '_hash_bits', '_capacity', '_min_capacity', '_size', 'hashmap', '_refresh_counter', 'hashmap_offset', 'buckets', 'ignore_rehash')

    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMapping = None) -> None:
        """Create (``offset is None``) or adopt (``offset`` given) an IMapping.

        Raises whatever ``shared_memory.malloc``/``put_obj`` raise; on any
        failure during construction the partially written storage is freed.
        """
        self._shared_memory = shared_memory
        self._base_address = shared_memory.base_address
        self._obj_size = None
        self._offset: Offset = None
        self._offset__data: Offset = None
        self._offset__size_offset: Offset = None
        self._offset__capacity_offset: Offset = None
        self._offset__hashmap_offset: Offset = None
        self._load_factor = 0.75  # capacity = ceil(len / load_factor)
        self._hash_bits: int = None
        self._capacity: int = None
        self._size: int = None
        self.hashmap: IList = None
        self.hashmap_offset: Offset = None
        self.buckets: Dict[int, IList] = dict()  # slot base index -> bucket wrapper

        if offset is None:
            # --- construction path: allocate and fill shared memory ---
            if obj is None:
                # obj = frozenset(set())
                # Default sizing when no source object is given.
                # NOTE(review): _size is set to data_len (16) below even though the
                # mapping is empty — confirm against callers.
                data_len = 16
            else:
                data_len = len(obj)

            self._size: int = data_len
            self.hash_bits = 1  # must be set before `capacity` (setter compares with _capacity)
            self.capacity = int(ceil(data_len / self._load_factor))

            offset, self._obj_size = shared_memory.malloc(ObjectType.tmapping, bs * len(MappingOffsets))
            try:
                self._offset = offset
                offset__data = offset + bs * len(BaseObjOffsets)
                self._offset__data = offset__data
                self._offset__size_offset: Offset = offset__data + bs * MappingOffsets.size.value
                self._offset__capacity_offset: Offset = offset__data + bs * MappingOffsets.capacity.value
                self._offset__hashmap_offset = offset__data + bs * MappingOffsets.hashmap_offset.value

                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)

                # The hashmap itself is a shared-memory IList pre-extended with zeros
                # (zero == MappingHashmapFieldTypes.tnone).
                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
                self.hashmap = cast(IList, self.hashmap)
                self.hashmap_offset = hashmap_offset
                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
                hashmap_capacity = self.capacity * len(MappingHashmapItemOffsets)
                self.hashmap.set_capacity(hashmap_capacity)
                self.hashmap.extend_with(hashmap_capacity, 0)
                hash_bits: int = self.hash_bits
                if obj is not None:
                    for key, value in obj.items():
                        key_hash = hash(key)
                        # Slot index = low `hash_bits` bits of the hash, scaled to the flat layout.
                        item_info_index: int = mask_least_significant_bits(key_hash, hash_bits) * len(MappingHashmapItemOffsets)
                        field_type_index = item_info_index + MappingHashmapItemOffsets.field_type.value
                        item_hash_index = item_info_index + MappingHashmapItemOffsets.field_hash.value
                        item_bucket_index = item_info_index + MappingHashmapItemOffsets.key_or_bucket.value
                        item_value_index = item_info_index + MappingHashmapItemOffsets.value_or_none.value
                        field_type = self.hashmap[field_type_index]
                        if MappingHashmapFieldTypes.tnone.value == field_type:
                            # Empty slot: store the pair inline.
                            self.hashmap[field_type_index] = MappingHashmapFieldTypes.tobj.value
                            self.hashmap[item_hash_index] = key_hash
                            self.hashmap[item_bucket_index] = key
                            self.hashmap[item_value_index] = value
                        elif MappingHashmapFieldTypes.tobj.value == field_type:
                            # First collision: promote the slot to a bucket, moving the
                            # resident pair into it without copying the payload objects.
                            bucket, bucket_offset, _ = shared_memory.put_obj(list())
                            bucket = cast(IList, bucket)
                            bucket.set_capacity(len(MappingBucketOffsets))
                            bucket.extend_with(len(MappingBucketOffsets), 0)
                            self.buckets[item_info_index] = bucket
                            self.hashmap.move_item_to_list(item_hash_index, bucket, MappingBucketOffsets.field_hash.value)
                            self.hashmap.move_item_to_list(item_bucket_index, bucket, MappingBucketOffsets.key_obj.value)
                            self.hashmap.move_item_to_list(item_value_index, bucket, MappingBucketOffsets.value_obj.value)
                            self.hashmap[field_type_index] = MappingHashmapFieldTypes.tbucket.value
                            self.hashmap[item_bucket_index] = bucket_offset
                            bucket.append(key_hash)
                            bucket.append(key)
                            bucket.append(value)
                        elif MappingHashmapFieldTypes.tbucket.value == field_type:
                            # Subsequent collision: append the triple to the existing bucket.
                            bucket = self.buckets[item_info_index]
                            bucket.append(key_hash)
                            bucket.append(key)
                            bucket.append(value)
                        else:
                            raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
            except:
                # Roll back partially-written shared memory before re-raising.
                self._free_mem()
                raise
        else:
            # --- adoption path: read the header and rebuild local wrappers ---
            self._offset = offset
            offset__data = offset + bs * len(BaseObjOffsets)
            self._offset__data = offset__data
            self._offset__size_offset: Offset = offset__data + bs * MappingOffsets.size.value
            self._offset__capacity_offset: Offset = offset__data + bs * MappingOffsets.capacity.value
            self._offset__hashmap_offset = offset__data + bs * MappingOffsets.hashmap_offset.value

            self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
            self.hash_bits = 1  # seed so the capacity setter's comparison works
            self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
            hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)

            self.hashmap_offset = hashmap_offset
            self.hashmap = IList(shared_memory, hashmap_offset)
            item_info_index: int = 0
            # Scan slots once to materialize wrappers for existing collision buckets.
            for item_info_index in range(0, self.capacity * len(MappingHashmapItemOffsets), len(MappingHashmapItemOffsets)):
                field_type_index = item_info_index + MappingHashmapItemOffsets.field_type.value
                item_hash_index = item_info_index + MappingHashmapItemOffsets.field_hash.value
                item_bucket_index = item_info_index + MappingHashmapItemOffsets.key_or_bucket.value
                item_value_index = item_info_index + MappingHashmapItemOffsets.value_or_none.value
                field_type = self.hashmap[field_type_index]
                if MappingHashmapFieldTypes.tnone.value == field_type:
                    continue
                elif MappingHashmapFieldTypes.tobj.value == field_type:
                    continue
                elif MappingHashmapFieldTypes.tbucket.value == field_type:
                    bucket_offset = self.hashmap[item_bucket_index]
                    self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
                else:
                    raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def __len__(self):
        return self._size

    def __iter__(self):
        """Iterate over keys (Mapping protocol)."""
        return IMappingIterator(self)

    def __getitem__(self, key: Hashable):
        """Look up ``key``; raises KeyError when absent (Mapping protocol)."""
        item_hash = hash(key)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MappingHashmapItemOffsets)
        field_type_index = item_info_index + MappingHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MappingHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MappingHashmapItemOffsets.key_or_bucket.value
        item_value_index = item_info_index + MappingHashmapItemOffsets.value_or_none.value
        field_type = self.hashmap[field_type_index]
        if MappingHashmapFieldTypes.tnone.value == field_type:
            raise KeyError
        elif MappingHashmapFieldTypes.tobj.value == field_type:
            # Compare the stored hash first (cheap) and only then the key itself.
            if (item_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
                return self.hashmap[item_value_index]
            else:
                raise KeyError
        elif MappingHashmapFieldTypes.tbucket.value == field_type:
            bucket = self.buckets[item_info_index]
            # Linear scan of the collision bucket.
            for sub_item_info_index in range(0, len(bucket) * len(MappingBucketOffsets), len(MappingBucketOffsets)):
                sub_item_hash_index = sub_item_info_index + MappingBucketOffsets.field_hash.value
                sub_item_key_obj_index = sub_item_info_index + MappingBucketOffsets.key_obj.value
                sub_item_value_obj_index = sub_item_info_index + MappingBucketOffsets.value_obj.value
                if (item_hash == bucket[sub_item_hash_index]) and (key == bucket[sub_item_key_obj_index]):
                    return bucket[sub_item_value_obj_index]

            raise KeyError
        else:
            raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    @property
    def hash_bits(self) -> int:
        """Number of low hash bits used to select a slot; capacity == 2 ** hash_bits."""
        return self._hash_bits

    @hash_bits.setter
    def hash_bits(self, value: int) -> None:
        self._hash_bits = value
        self._capacity = 2 ** value

    @property
    def capacity(self) -> int:
        return self._capacity

    @capacity.setter
    def capacity(self, value: int) -> None:
        # Grow-only: rounds the requested capacity up to the next power of two
        # via hash_bits. NOTE(review): requires _capacity to be non-None, so
        # hash_bits must be assigned first — confirm all call sites do this.
        if value <= self._capacity:
            return

        if value <= 2:
            self.hash_bits = 1
        else:
            self.hash_bits = int(ceil(log2(value)))

    def __str__(self) -> str:
        return dict(self).__str__()

    def __repr__(self) -> str:
        return dict(self).__repr__()

    def _free_mem(self):
        """Release all shared memory owned by this mapping (buckets, hashmap, header)."""
        if self._offset is not None:
            for _, bucket in self.buckets.items():
                self._shared_memory.destroy_obj(bucket._offset)

            self.buckets.clear()
            if self.hashmap_offset is not None:
                self._shared_memory.destroy_obj(self.hashmap_offset)
                self.hashmap_offset = None

            self._shared_memory.free(self._offset)
            self._offset = None


class IMappingIterator:
    """Iterator over an IMapping's keys (inline slots and collision buckets)."""

    def __init__(self, imapping: IMapping) -> None:
        self._imapping = imapping
        self._index = 0       # current hashmap slot
        self._sub_index = 0   # current entry inside the current bucket

    def __next__(self):
        while self._index < self._imapping.capacity:
            item_info_index: int = self._index * len(MappingHashmapItemOffsets)
            field_type_index = item_info_index + MappingHashmapItemOffsets.field_type.value
            item_hash_index = item_info_index + MappingHashmapItemOffsets.field_hash.value
MappingHashmapItemOffsets.field_hash.value 3828 item_bucket_index = item_info_index + MappingHashmapItemOffsets.key_or_bucket.value 3829 item_value_index = item_info_index + MappingHashmapItemOffsets.value_or_none.value 3830 field_type = self._imapping.hashmap[field_type_index] 3831 if MappingHashmapFieldTypes.tnone.value == field_type: 3832 self._index += 1 3833 continue 3834 elif MappingHashmapFieldTypes.tobj.value == field_type: 3835 result = self._imapping.hashmap[item_bucket_index] 3836 self._index += 1 3837 break 3838 elif MappingHashmapFieldTypes.tbucket.value == field_type: 3839 bucket = self._imapping.buckets[item_info_index] 3840 sub_item_info_index = self._sub_index 3841 sub_item_hash_index = sub_item_info_index * len(MappingBucketOffsets) + MappingBucketOffsets.field_hash.value 3842 sub_item_key_obj_index = sub_item_info_index * len(MappingBucketOffsets) + MappingBucketOffsets.key_obj.value 3843 sub_item_value_obj_index = sub_item_info_index * len(MappingBucketOffsets) + MappingBucketOffsets.value_obj.value 3844 if (sub_item_info_index * len(MappingBucketOffsets)) >= len(bucket): 3845 self._sub_index = 0 3846 self._index += 1 3847 continue 3848 3849 result = bucket[sub_item_key_obj_index] 3850 self._sub_index += 1 3851 break 3852 else: 3853 raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}') 3854 else: 3855 raise StopIteration 3856 3857 return result 3858 3859 def __iter__(self): 3860 return self 3861 3862 3863class TMapping: 3864 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: AbsMapping) -> Tuple[IMapping, Offset, Size]: 3865 obj: IMapping = IMapping(shared_memory, obj=obj) 3866 return obj, obj._offset, obj._obj_size 3867 3868 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> IMapping: 3869 if ObjectType.tmapping != read_uint64(shared_memory.base_address, offset): 3870 raise WrongObjectTypeError 3871 3872 return IMapping(shared_memory, offset) 3873 
3874 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 3875 if ObjectType.tmapping != read_uint64(shared_memory.base_address, offset): 3876 raise WrongObjectTypeError 3877 3878 obj: IMapping = IMapping(shared_memory, offset) 3879 obj._free_mem() 3880 3881 3882# ====================================================================================================================== 3883# === MutableMapping ============================================================================================================= 3884 3885 3886class MutableMappingOffsets(IntEnum): 3887 size = 0 3888 capacity = 1 3889 hashmap_offset = 2 3890 refresh_counter = 3 3891 3892 3893class MutableMappingHashmapFieldTypes(IntEnum): 3894 tnone = 0 3895 tobj = 1 3896 tbucket = 2 3897 3898 3899class MutableMappingHashmapItemOffsets(IntEnum): 3900 field_type = 0 3901 field_hash = 1 3902 key_or_bucket = 2 3903 value_or_none = 3 3904 3905 3906class MutableMappingBucketFieldTypes(IntEnum): 3907 tnone = 0 3908 tobj = 1 3909 3910 3911class MutableMappingBucketOffsets(IntEnum): 3912 field_type = 0 3913 field_hash = 1 3914 key_obj = 2 3915 value_obj = 3 3916 3917 3918class IMutableMapping(BaseIObject, AbsMutableMapping): 3919 __slots__ = ('_shared_memory', '_base_address', '_obj_size', '_offset', '_offset__data', '_offset__size_offset', '_offset__capacity_offset', '_offset__hashmap_offset', '_load_factor', '_load_factor_2', '_hash_bits', '_capacity', '_min_capacity', '_size', 'hashmap', 'hashmap_offset', 'buckets', '_refresh_counter', '_offset__refresh_counter_offset', 'ignore_rehash') 3920 3921 # @property 3922 # def __mro__(self) -> Tuple: 3923 # return BaseIObject, AbsMutableMapping, dict 3924 3925 def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMutableMapping = None) -> None: 3926 self._shared_memory = shared_memory 3927 self._base_address = shared_memory.base_address 3928 self._obj_size = None 3929 self._offset: Offset = None 3930 
self._offset__data: Offset = None 3931 self._offset__size_offset: Offset = None 3932 self._offset__capacity_offset: Offset = None 3933 self._offset__hashmap_offset: Offset = None 3934 self._offset__refresh_counter_offset: Offset = None 3935 self._load_factor = 0.75 3936 self._load_factor_2 = 0.5625 3937 self._hash_bits: int = None 3938 self._capacity: int = None 3939 self._min_capacity: int = None 3940 self._size: int = None 3941 self.hashmap: IList = None 3942 self._refresh_counter: int = 0 3943 self.hashmap_offset: Offset = None 3944 self.buckets: Dict[int, IList] = dict() 3945 3946 self.ignore_rehash: bool = True 3947 3948 if offset is None: 3949 if obj is None: 3950 # obj = frozenset(set()) 3951 data_len = 16 3952 else: 3953 data_len = len(obj) 3954 3955 self._size: int = 0 3956 self.hash_bits = 1 3957 self.capacity = int(ceil(data_len / self._load_factor)) 3958 self._min_capacity = int(ceil(self._capacity * self._load_factor_2)) 3959 3960 offset, self._obj_size = shared_memory.malloc(ObjectType.tmutablemapping, bs * len(MutableMappingOffsets)) 3961 created_items_offsets: List[Offset] = list() 3962 try: 3963 self._offset = offset 3964 offset__data = offset + bs * len(BaseObjOffsets) 3965 self._offset__data = offset__data 3966 self._offset__size_offset = offset__data + bs * MutableMappingOffsets.size.value 3967 self._offset__capacity_offset = offset__data + bs * MutableMappingOffsets.capacity.value 3968 self._offset__hashmap_offset = offset__data + bs * MutableMappingOffsets.hashmap_offset.value 3969 self._offset__refresh_counter_offset = offset__data + bs * MutableMappingOffsets.refresh_counter.value 3970 3971 write_uint64(shared_memory.base_address, self._offset__size_offset, self._size) 3972 write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity) 3973 write_uint64(shared_memory.base_address, self._offset__refresh_counter_offset, self._refresh_counter) 3974 3975 self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list()) 3976 
self.hashmap = cast(IList, self.hashmap) 3977 self.hashmap_offset = hashmap_offset 3978 write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset) 3979 hashmap_capacity = self.capacity * len(MutableMappingHashmapItemOffsets) 3980 self.hashmap.set_capacity(hashmap_capacity) 3981 self.hashmap.extend_with(hashmap_capacity, 0) 3982 hash_bits: int = self.hash_bits 3983 if obj is None: 3984 pass 3985 elif isinstance(obj, IMutableMapping): 3986 self._move_from(obj) 3987 else: 3988 for key, value in obj.items(): 3989 self.__setitem__(key, value) 3990 3991 self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset) 3992 3993 self.ignore_rehash = False 3994 3995 # print(f'Constructed {self.hashmap=}') 3996 # print(f'\tConstructed buckets:') 3997 # pdi(self.buckets) 3998 # for bucket_index, bucket in self.buckets.items(): 3999 # pdi(bucket) 4000 # print(f'\t\t{bucket_index}:', bucket) 4001 except: 4002 self._free_mem() 4003 raise 4004 else: 4005 self._refresh_hashmap(offset) 4006 self.ignore_rehash = False 4007 4008 # self._offset = offset 4009 # offset__data = offset + bs * len(BaseObjOffsets) 4010 # self._offset__data = offset__data 4011 # self._offset__size_offset: Offset = offset__data + bs * MutableMappingOffsets.size.value 4012 # self._offset__capacity_offset: Offset = offset__data + bs * MutableMappingOffsets.capacity.value 4013 # self._offset__hashmap_offset = offset__data + bs * MutableMappingOffsets.hashmap_offset.value 4014 4015 # self._size = read_uint64(shared_memory.base_address, self._offset__size_offset) 4016 # self.hash_bits = 1 4017 # self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset) 4018 # hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset) 4019 # self._min_capacity = int(ceil(self._capacity * self._load_factor_2)) 4020 4021 # self.hashmap_offset = hashmap_offset 4022 # self.hashmap = IList(shared_memory, 
hashmap_offset) 4023 # # print(f'Adopted by {type(self)}: {self.hashmap=}') 4024 # item_info_index: int = 0 4025 # # for item_info_index in range(self.capacity): 4026 # # field_type_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.field_type.value 4027 # # item_hash_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.field_hash.value 4028 # # item_bucket_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.key_or_bucket.value 4029 # # item_value_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.value_or_none.value 4030 # # field_type = self.hashmap[field_type_index] 4031 # # if MutableMappingHashmapFieldTypes.tnone.value == field_type: 4032 # # continue 4033 # # elif MutableMappingHashmapFieldTypes.tobj.value == field_type: 4034 # # continue 4035 # # elif MutableMappingHashmapFieldTypes.tbucket.value == field_type: 4036 # # bucket_offset = self.hashmap[item_bucket_index] 4037 # # self.buckets[item_info_index] = IList(shared_memory, bucket_offset) 4038 # # else: 4039 # # raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}') 4040 4041 # for item_info_index in range(0, self.capacity * len(MutableMappingHashmapItemOffsets), len(MutableMappingHashmapItemOffsets)): 4042 # field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value 4043 # item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value 4044 # item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value 4045 # item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value 4046 # field_type = self.hashmap[field_type_index] 4047 # if MutableMappingHashmapFieldTypes.tnone.value == field_type: 4048 # continue 4049 # elif MutableMappingHashmapFieldTypes.tobj.value == 
field_type: 4050 # continue 4051 # elif MutableMappingHashmapFieldTypes.tbucket.value == field_type: 4052 # bucket_offset = self.hashmap[item_bucket_index] 4053 # self.buckets[item_info_index] = IList(shared_memory, bucket_offset) 4054 # else: 4055 # raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}') 4056 4057 # self.ignore_rehash = False 4058 4059 # # print(f'\tAdopted by {type(self)} buckets:') 4060 # # pdi(self.buckets) 4061 # # for bucket_index, bucket in self.buckets.items(): 4062 # # pdi(bucket) 4063 # # print(f'\t\t{bucket_index}:', bucket) 4064 4065 def _refresh_hashmap(self, offset: Offset): 4066 # print(f'~ refresh_hashmap {offset}: {intro_func_repr_limited()}') 4067 4068 # ignore_rehash = self.ignore_rehash 4069 # self.ignore_rehash = True 4070 4071 self._hash_bits = None 4072 self._capacity = None 4073 self._min_capacity = None 4074 self._size = None 4075 self.hashmap = None 4076 self._refresh_counter = 0 4077 self.hashmap_offset = None 4078 self.buckets = dict() 4079 4080 shared_memory = self._shared_memory 4081 self._offset = offset 4082 offset__data = offset + bs * len(BaseObjOffsets) 4083 self._offset__data = offset__data 4084 self._offset__size_offset: Offset = offset__data + bs * MutableMappingOffsets.size.value 4085 self._offset__capacity_offset: Offset = offset__data + bs * MutableMappingOffsets.capacity.value 4086 self._offset__hashmap_offset = offset__data + bs * MutableMappingOffsets.hashmap_offset.value 4087 self._offset__refresh_counter_offset = offset__data + bs * MutableMappingOffsets.refresh_counter.value 4088 4089 self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset) 4090 self._size = read_uint64(shared_memory.base_address, self._offset__size_offset) 4091 self.hash_bits = 1 4092 self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset) 4093 hashmap_offset = read_uint64(shared_memory.base_address, 
self._offset__hashmap_offset) 4094 self._min_capacity = int(ceil(self._capacity * self._load_factor_2)) 4095 4096 self.hashmap_offset = hashmap_offset 4097 self.hashmap = IList(shared_memory, hashmap_offset) 4098 # print(f'Adopted by {type(self)}: {self.hashmap=}') 4099 # item_info_index: int = 0 4100 # for item_info_index in range(self.capacity): 4101 # field_type_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.field_type.value 4102 # item_hash_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.field_hash.value 4103 # item_bucket_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.key_or_bucket.value 4104 # item_value_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.value_or_none.value 4105 # field_type = self.hashmap[field_type_index] 4106 # if MutableMappingHashmapFieldTypes.tnone.value == field_type: 4107 # continue 4108 # elif MutableMappingHashmapFieldTypes.tobj.value == field_type: 4109 # continue 4110 # elif MutableMappingHashmapFieldTypes.tbucket.value == field_type: 4111 # bucket_offset = self.hashmap[item_bucket_index] 4112 # self.buckets[item_info_index] = IList(shared_memory, bucket_offset) 4113 # else: 4114 # raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}') 4115 4116 for item_info_index in range(0, self.capacity * len(MutableMappingHashmapItemOffsets), len(MutableMappingHashmapItemOffsets)): 4117 field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value 4118 item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value 4119 item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value 4120 item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value 4121 field_type = 
self.hashmap[field_type_index]  # (completes the `field_type = ...` assignment begun on the previous source line)
            # Tail of the adoption loop started above: re-create a local IList
            # wrapper for every collision bucket referenced by the hashmap.
            if MutableMappingHashmapFieldTypes.tnone.value == field_type:
                continue
            elif MutableMappingHashmapFieldTypes.tobj.value == field_type:
                continue
            elif MutableMappingHashmapFieldTypes.tbucket.value == field_type:
                bucket_offset = self.hashmap[item_bucket_index]
                self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
            else:
                raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

        # self.ignore_rehash = ignore_rehash

    @property
    def refresh_counter(self):
        # Rehash generation counter stored in shared memory; other holders of
        # this mapping compare it with their local mirror to detect a rebuild
        # (see _check_hashmap()).
        return read_uint64(self._base_address, self._offset__refresh_counter_offset)

    def _increase_refresh_counter(self):
        # Bump the local mirror and publish it to shared memory so that every
        # other holder refreshes its hashmap view on next access.
        if self.ignore_rehash:
            pass
        else:
            self._refresh_counter += 1
            write_uint64(self._base_address, self._offset__refresh_counter_offset, self._refresh_counter)

    def _check_hashmap(self):
        # Re-read the hashmap when another holder rehashed (shared counter
        # differs from the local mirror). Returns True iff a refresh happened.
        if self.ignore_rehash:
            return False
        else:
            base_address = self._base_address
            refresh_counter = read_uint64(base_address, self._offset__refresh_counter_offset)
            if self._refresh_counter != refresh_counter:
                self._refresh_hashmap(self._offset)
                return True

            return False

    # NOTE: a commented-out guarded `hashmap` property (running _check_hashmap
    # on every access) existed here in the original; plain attribute access is
    # used instead.

    def _increase_size(self):
        # Persist the new size and rebuild when the size leaves the
        # [min_capacity, capacity] window.
        # NOTE(review): the `< self._min_capacity` arm can fire on an INSERT
        # (e.g. right after adoption of an oversized table) — presumably
        # intentional shrinking, but confirm.
        self._size += 1
        write_uint64(self._base_address, self._offset__size_offset, self._size)
        if (self._size > self._capacity) or (self._size < self._min_capacity):
            self._rehash()

    def _decrease_size(self):
        self._size -= 1
        if self._size < 0:
            # More deletions than recorded insertions: internal inconsistency.
            raise RuntimeError('Size of the set is negative')

        write_uint64(self._base_address, self._offset__size_offset, self._size)
        if (self._size > self._capacity) or (self._size < self._min_capacity):
            self._rehash()

    def _move_from(self, other: 'IMutableMapping'):
        # Drain `other` into self using raw (type, offset) pairs, avoiding a
        # serialisation round-trip through Python objects.
        for key_hash, key_type, key_offset, value_type, value_offset in other.iter_offset_pop():
            self.setitem_as_offset(key_hash, key_type, key_offset, value_type, value_offset)

    def _rehash(self):
        # Rebuild the table at the capacity implied by the current size:
        # 1) create a fresh IMutableMapping from self via put_obj();
        # 2) swap the two objects' identities — both the local mirrors and the
        #    raw header words (hashmap offset, size, capacity) in shared memory;
        # 3) destroy the temporary, which by then owns the OLD storage.
        if self.ignore_rehash:
            return

        self._increase_refresh_counter()

        ignore_rehash = self.ignore_rehash
        self.ignore_rehash = True  # guard against recursive rehash while copying

        new_other, new_other_offset, new_other_size = self._shared_memory.put_obj(self)
        new_other = cast(IMutableMapping, new_other)

        # Snapshot the freshly built mapping's state: local mirrors and the raw
        # header words as stored in shared memory.
        other_capacity = new_other._capacity
        other_hash_bits = new_other._hash_bits
        other_min_capacity = new_other._min_capacity
        other_size = new_other._size
        other_hashmap = new_other.hashmap
        other_hashmap_offset = new_other.hashmap_offset
        other_buckets = new_other.buckets
        other_hashmap_offset_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset)
        other_size_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset)
        other_capacity_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__capacity_offset)

        # Hand the OLD storage over to the temporary object ...
        new_other._capacity = self._capacity
        new_other._hash_bits = self._hash_bits
        new_other._min_capacity = self._min_capacity
        new_other._size = self._size
        new_other.hashmap = self.hashmap
        new_other.hashmap_offset = self.hashmap_offset
        new_other.buckets = self.buckets
        write_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset, read_uint64(self._base_address, self._offset__hashmap_offset))
        write_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset, read_uint64(self._base_address, self._offset__size_offset))
        write_uint64(new_other._shared_memory.base_address, new_other._offset__capacity_offset, read_uint64(self._base_address, self._offset__capacity_offset))

        # ... and take over the NEW storage ourselves.
        self._capacity = other_capacity
        self._hash_bits = other_hash_bits
        self._min_capacity = other_min_capacity
        self._size = other_size
        self.hashmap = other_hashmap
        self.hashmap_offset = other_hashmap_offset
        self.buckets = other_buckets
        write_uint64(self._base_address, self._offset__hashmap_offset, other_hashmap_offset_bin)
        write_uint64(self._base_address, self._offset__size_offset, other_size_bin)
        write_uint64(self._base_address, self._offset__capacity_offset, other_capacity_bin)

        # The temporary now points at the old storage; destroying it frees it.
        self._shared_memory.destroy_obj(new_other_offset)

        self.ignore_rehash = ignore_rehash

    def __len__(self):
        self._check_hashmap()
        return self._size

    def __iter__(self):
        # Iterates KEYS, matching the MutableMapping protocol.
        self._check_hashmap()
        return IMutableMappingIterator(self)

    def iter_offset(self):
        # Iterate raw (key_hash, key_type, key_offset, value_type, value_offset)
        # tuples without deserialising payloads.
        self._check_hashmap()
        return IMutableMappingIteratorAsOffset(self)

    def iter_offset_pop(self):
        # Same as iter_offset(), but each yielded entry is removed (drain).
        self._check_hashmap()
        return IMutableMappingIteratorAsOffset(self, True)

    # NOTE: a commented-out __contains__ implementation followed here in the
    # original source (superseded by __getitem__ via the MutableMapping
    # protocol); its remainder sits on the next source line.
# (remainder of the commented-out __contains__ implementation that began on
# the previous source line — superseded by __getitem__)

    def __getitem__(self, key: Hashable):
        # Look up `key`: hash it, select the hashmap slot by the masked hash,
        # then resolve either inline (tobj) or through the slot's collision
        # bucket (tbucket). Raises KeyError when absent.
        self._check_hashmap()
        item_hash = hash(key)
        # Slot record index = masked hash * record width (one record per slot).
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MutableMappingHashmapItemOffsets)
        field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value
        item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value
        field_type = self.hashmap[field_type_index]
        if MutableMappingHashmapFieldTypes.tnone.value == field_type:
            raise KeyError
        elif MutableMappingHashmapFieldTypes.tobj.value == field_type:
            # Compare the stored hash first (cheap), then the key itself.
            if (item_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
                return self.hashmap[item_value_index]
            else:
                raise KeyError
        elif MutableMappingHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError  # cached wrapper is stale
            except KeyError:
                raise
                # NOTE(review): unreachable — the bare `raise` above always
                # re-raises, so this rebuild of the cached bucket wrapper can
                # never run. Presumably it was the intended recovery for a
                # stale/missing cache entry; confirm before enabling.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # Linear scan of the collision bucket, skipping tombstones.
            for sub_item_info_index in range(0, len(bucket), len(MutableMappingBucketOffsets)):
                bucket_field_type = bucket[sub_item_info_index + MutableMappingBucketOffsets.field_type.value]
                if MutableMappingBucketFieldTypes.tnone.value == bucket_field_type:
                    continue

                sub_item_hash_index = sub_item_info_index + MutableMappingBucketOffsets.field_hash.value
                sub_item_key_obj_index = sub_item_info_index + MutableMappingBucketOffsets.key_obj.value
                sub_item_value_obj_index = sub_item_info_index + MutableMappingBucketOffsets.value_obj.value
                if (item_hash == bucket[sub_item_hash_index]) and (key == bucket[sub_item_key_obj_index]):
                    return bucket[sub_item_value_obj_index]

            raise KeyError
        else:
            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def __setitem__(self, key, value):
        # Insert or update `key`. Empty slot -> store inline; occupied inline
        # slot -> update in place or spill both entries into a new collision
        # bucket; bucket slot -> update, reuse a tombstone, or append.
        self._check_hashmap()
        key_hash = hash(key)
        item_info_index: int = mask_least_significant_bits(key_hash, self.hash_bits) * len(MutableMappingHashmapItemOffsets)
        field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value
        item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value
        field_type = self.hashmap[field_type_index]
        if MutableMappingHashmapFieldTypes.tnone.value == field_type:
            # Empty slot: store the pair inline in the hashmap record.
            self.hashmap[field_type_index] = MutableMappingHashmapFieldTypes.tobj.value
            self.hashmap[item_hash_index] = key_hash
            self.hashmap[item_bucket_index] = key
            self.hashmap[item_value_index] = value
            self._increase_size()
            return
        elif MutableMappingHashmapFieldTypes.tobj.value == field_type:
            if (key_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
                # Same key: overwrite the value in place.
                self.hashmap[item_value_index] = value
                return

            # Different key landed in the same slot: convert the inline slot
            # into a collision bucket holding both entries.
            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(len(MutableMappingBucketOffsets))
            bucket.extend_with(len(MutableMappingBucketOffsets), 0)
            self.buckets[item_info_index] = bucket
            bucket[MutableMappingBucketOffsets.field_type.value] = MutableMappingBucketFieldTypes.tobj.value
            # Move the existing entry into the bucket without copying payloads.
            self.hashmap.move_item_to_list(item_hash_index, bucket, MutableMappingBucketOffsets.field_hash.value)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, MutableMappingBucketOffsets.key_obj.value)
            self.hashmap.move_item_to_list(item_value_index, bucket, MutableMappingBucketOffsets.value_obj.value)
            self.hashmap[field_type_index] = MutableMappingHashmapFieldTypes.tbucket.value
            self.hashmap[item_bucket_index] = bucket_offset
            # Append the new entry as the bucket's second record.
            bucket.append(MutableMappingBucketFieldTypes.tobj.value)
            bucket.append(key_hash)
            bucket.append(key)
            bucket.append(value)
            self._increase_size()
            return
        elif MutableMappingHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError  # cached wrapper is stale
            except KeyError:
                raise
                # NOTE(review): unreachable rebuild fallback (see __getitem__).
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            bucket_len: int = len(bucket)
            # Pass 1: update in place when the key already exists.
            for bucket_item_index in range(0, bucket_len, len(MutableMappingBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableMappingBucketOffsets.field_type.value]
                if MutableMappingBucketFieldTypes.tobj.value == bucket_field_type:
                    if (key_hash == bucket[bucket_item_index + MutableMappingBucketOffsets.field_hash.value]) and (key == bucket[bucket_item_index + MutableMappingBucketOffsets.key_obj.value]):
                        bucket[bucket_item_index + MutableMappingBucketOffsets.value_obj.value] = value
                        return

            # Pass 2: reuse the first tombstone; the for-else appends when no
            # tombstone was found (the loop has no `break`, so `else` runs
            # whenever the loop finishes without returning).
            for bucket_item_index in range(0, bucket_len, len(MutableMappingBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableMappingBucketOffsets.field_type.value]
                if MutableMappingBucketFieldTypes.tnone.value == bucket_field_type:
                    bucket[bucket_item_index + MutableMappingBucketOffsets.field_type.value] = MutableMappingBucketFieldTypes.tobj.value
                    bucket[bucket_item_index + MutableMappingBucketOffsets.field_hash.value] = key_hash
                    bucket[bucket_item_index + MutableMappingBucketOffsets.key_obj.value] = key
                    bucket[bucket_item_index + MutableMappingBucketOffsets.value_obj.value] = value
                    self._increase_size()
                    return
            else:
                bucket.append(MutableMappingBucketFieldTypes.tobj.value)
                bucket.append(key_hash)
                bucket.append(key)
                bucket.append(value)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def setitem_as_offset(self, key_hash, key_type, key_offset, value_type, value_offset):
        # Raw-form __setitem__: key and value arrive as (type, offset) pairs
        # that already live in shared memory, so payloads are linked in place
        # rather than re-serialised.
        self._check_hashmap()
        key = (key_type, key_offset)
        value = (value_type, value_offset)
        item_info_index: int = mask_least_significant_bits(key_hash, self.hash_bits) * len(MutableMappingHashmapItemOffsets)
        field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value
        item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value
        field_type = self.hashmap[field_type_index]
        if MutableMappingHashmapFieldTypes.tnone.value == field_type:
            self.hashmap[field_type_index] = MutableMappingHashmapFieldTypes.tobj.value
            self.hashmap[item_hash_index] = key_hash
            self.hashmap.setitem_as_offset(item_bucket_index, key)
            self.hashmap.setitem_as_offset(item_value_index, value)
            self._increase_size()
            return
        elif MutableMappingHashmapFieldTypes.tobj.value == field_type:
            if (key_hash == self.hashmap[item_hash_index]) and (key == self.hashmap.getitem_as_offset(item_bucket_index)):
                self.hashmap.setitem_as_offset(item_value_index, value)
                return

            # Collision: convert the inline slot into a collision bucket.
            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(len(MutableMappingBucketOffsets))
            bucket.extend_with(len(MutableMappingBucketOffsets), 0)
            self.buckets[item_info_index] = bucket
            bucket[MutableMappingBucketOffsets.field_type.value] = MutableMappingBucketFieldTypes.tobj.value
            self.hashmap.move_item_to_list(item_hash_index, bucket, MutableMappingBucketOffsets.field_hash.value)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, MutableMappingBucketOffsets.key_obj.value)
            self.hashmap.move_item_to_list(item_value_index, bucket, MutableMappingBucketOffsets.value_obj.value)
            self.hashmap[field_type_index] = MutableMappingHashmapFieldTypes.tbucket.value
            self.hashmap[item_bucket_index] = bucket_offset
            bucket.append(MutableMappingBucketFieldTypes.tobj.value)
            bucket.append(key_hash)
            bucket.append_as_offset(key)
            bucket.append_as_offset(value)
            self._increase_size()
            return
        elif MutableMappingHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError  # cached wrapper is stale
            except KeyError:
                raise
                # NOTE(review): unreachable rebuild fallback (see __getitem__).
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            bucket_len: int = len(bucket)
            # Pass 1: update in place when the key already exists.
            for bucket_item_index in range(0, bucket_len, len(MutableMappingBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableMappingBucketOffsets.field_type.value]
                if MutableMappingBucketFieldTypes.tobj.value == bucket_field_type:
                    if (key_hash == bucket[bucket_item_index + MutableMappingBucketOffsets.field_hash.value]) and (key == bucket.getitem_as_offset(bucket_item_index + MutableMappingBucketOffsets.key_obj.value)):
                        bucket.setitem_as_offset(bucket_item_index + MutableMappingBucketOffsets.value_obj.value, value)
                        return

            # Pass 2: reuse the first tombstone, otherwise append (for-else,
            # same pattern as __setitem__).
            for bucket_item_index in range(0, bucket_len, len(MutableMappingBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableMappingBucketOffsets.field_type.value]
                if MutableMappingBucketFieldTypes.tnone.value == bucket_field_type:
                    bucket[bucket_item_index + MutableMappingBucketOffsets.field_type.value] = MutableMappingBucketFieldTypes.tobj.value
                    bucket[bucket_item_index + MutableMappingBucketOffsets.field_hash.value] = key_hash
                    bucket.setitem_as_offset(bucket_item_index + MutableMappingBucketOffsets.key_obj.value, key)
                    bucket.setitem_as_offset(bucket_item_index + MutableMappingBucketOffsets.value_obj.value, value)
                    self._increase_size()
                    return
            else:
                bucket.append(MutableMappingBucketFieldTypes.tobj.value)
                bucket.append(key_hash)
                bucket.append_as_offset(key)
                bucket.append_as_offset(value)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def __delitem__(self, key):
        # Delete `key`: an inline slot is cleared back to tnone; an entry in a
        # collision bucket is tombstoned (the bucket itself is kept for reuse).
        self._check_hashmap()
        item_hash = hash(key)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MutableMappingHashmapItemOffsets)
        field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value
        item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value
        field_type = self.hashmap[field_type_index]
        if MutableMappingHashmapFieldTypes.tnone.value == field_type:
            raise KeyError
        elif MutableMappingHashmapFieldTypes.tobj.value == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
                self.hashmap[field_type_index] = MutableMappingHashmapFieldTypes.tnone.value
                self.hashmap[item_hash_index] = None
                self.hashmap[item_bucket_index] = None
                self.hashmap[item_value_index] = None
                self._decrease_size()
                return
            else:
                raise KeyError
        elif MutableMappingHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError  # cached wrapper is stale
            except KeyError:
                raise
                # NOTE(review): unreachable rebuild fallback (see __getitem__).
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # Scan the bucket for the matching live record and tombstone it.
            for sub_item_info_index in range(0, len(bucket), len(MutableMappingBucketOffsets)):
                bucket_field_type = bucket[sub_item_info_index + MutableMappingBucketOffsets.field_type.value]
                if MutableMappingBucketFieldTypes.tnone.value == bucket_field_type:
                    continue

                sub_item_hash_index = sub_item_info_index + MutableMappingBucketOffsets.field_hash.value
                sub_item_key_obj_index = sub_item_info_index + MutableMappingBucketOffsets.key_obj.value
                sub_item_value_obj_index = sub_item_info_index + MutableMappingBucketOffsets.value_obj.value
                if (item_hash == bucket[sub_item_hash_index]) and (key == bucket[sub_item_key_obj_index]):
                    bucket[sub_item_info_index + MutableMappingBucketOffsets.field_type.value] = MutableMappingBucketFieldTypes.tnone.value
                    bucket[sub_item_hash_index] = None
                    bucket[sub_item_key_obj_index] = None
                    bucket[sub_item_value_obj_index] = None
                    self._decrease_size()
                    return

            raise KeyError
        else:
            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    @property
    def hash_bits(self) -> int:
        # Number of least-significant hash bits used to index the hashmap.
        return self._hash_bits

    @hash_bits.setter
    def hash_bits(self, value: int) -> None:
        # Capacity is always a power of two derived from hash_bits.
        self._hash_bits = value
        self._capacity = 2 ** value

    @property
    def capacity(self) -> int:
        return self._capacity

    @capacity.setter
    def capacity(self, value: int) -> None:
        # Capacity can only grow; the requested value is rounded up to the
        # next power of two via hash_bits = ceil(log2(value)).
        if value <= self._capacity:
            return

        if value <= 2:
            self.hash_bits = 1
        else:
            self.hash_bits = int(ceil(log2(value)))

    def __str__(self) -> str:
        # Render through a plain dict copy (iterates the whole mapping).
        self._check_hashmap()
        return dict(self).__str__()

    def __repr__(self) -> str:
        self._check_hashmap()
        return dict(self).__repr__()

    def _free_mem(self):
        # Release all shared memory owned by this mapping: every collision
        # bucket, the hashmap array, then the object allocation itself.
        if self._offset is not None:
            if self.hashmap_offset is not None:
                self._check_hashmap()

            for _, bucket in self.buckets.items():
                self._shared_memory.destroy_obj(bucket._offset)
            self.buckets.clear()
            if self.hashmap_offset is not None:
self._shared_memory.destroy_obj(self.hashmap_offset) 4586 self.hashmap_offset = None 4587 4588 self._shared_memory.free(self._offset) 4589 self._offset = None 4590 4591 4592class IMutableMappingIterator: 4593 def __init__(self, imapping: IMutableMapping) -> None: 4594 self._imapping = imapping 4595 self._index = 0 4596 self._sub_index = 0 4597 4598 def __next__(self): 4599 if self._imapping._check_hashmap(): 4600 raise RuntimeError("Dictionary's hashmap changed during iteration") 4601 4602 while self._index < self._imapping.capacity: 4603 item_info_index: int = self._index * len(MutableMappingHashmapItemOffsets) 4604 field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value 4605 item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value 4606 item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value 4607 item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value 4608 field_type = self._imapping.hashmap[field_type_index] 4609 if MutableMappingHashmapFieldTypes.tnone.value == field_type: 4610 self._index += 1 4611 continue 4612 elif MutableMappingHashmapFieldTypes.tobj.value == field_type: 4613 result = self._imapping.hashmap[item_bucket_index] 4614 self._index += 1 4615 return result 4616 elif MutableMappingHashmapFieldTypes.tbucket.value == field_type: 4617 bucket_offset = self._imapping.hashmap[item_bucket_index] 4618 try: 4619 bucket = self._imapping.buckets[item_info_index] 4620 if bucket._offset != bucket_offset: 4621 raise KeyError 4622 except KeyError: 4623 raise 4624 self._imapping.buckets[item_info_index] = bucket = IList(self._imapping._shared_memory, bucket_offset) 4625 4626 bucket_len = len(bucket) 4627 sub_item_info_index = self._sub_index 4628 while (sub_item_info_index * len(MutableMappingBucketOffsets)) < bucket_len: 4629 sub_item_field_type_index = sub_item_info_index * len(MutableMappingBucketOffsets) + 
MutableMappingBucketOffsets.field_type.value 4630 if bucket[sub_item_field_type_index] == MutableMappingBucketFieldTypes.tnone.value: 4631 sub_item_info_index += 1 4632 continue 4633 4634 sub_item_hash_index = sub_item_info_index * len(MutableMappingBucketOffsets) + MutableMappingBucketOffsets.field_hash.value 4635 sub_item_key_obj_index = sub_item_info_index * len(MutableMappingBucketOffsets) + MutableMappingBucketOffsets.key_obj.value 4636 sub_item_value_obj_index = sub_item_info_index * len(MutableMappingBucketOffsets) + MutableMappingBucketOffsets.value_obj.value 4637 result = bucket[sub_item_key_obj_index] 4638 self._sub_index += 1 4639 return result 4640 else: 4641 self._sub_index = 0 4642 self._index += 1 4643 continue 4644 else: 4645 raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}') 4646 else: 4647 raise StopIteration 4648 4649 def __iter__(self): 4650 return self 4651 4652 4653class IMutableMappingIteratorAsOffset: 4654 def __init__(self, imapping: IMutableMapping, pop: bool = False) -> None: 4655 self._imapping = imapping 4656 self._pop: bool = pop 4657 self._index = 0 4658 self._sub_index = 0 4659 4660 def __next__(self): 4661 if self._imapping._check_hashmap(): 4662 raise RuntimeError("Dictionary's hashmap changed during iteration") 4663 4664 while self._index < self._imapping.capacity: 4665 item_info_index: int = self._index * len(MutableMappingHashmapItemOffsets) 4666 field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value 4667 item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value 4668 item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value 4669 item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value 4670 field_type = self._imapping.hashmap[field_type_index] 4671 if MutableMappingHashmapFieldTypes.tnone.value == field_type: 4672 self._index += 1 4673 continue 4674 
elif MutableMappingHashmapFieldTypes.tobj.value == field_type: 4675 key_hash = self._imapping.hashmap[item_hash_index] 4676 key_type, key_offset = self._imapping.hashmap.getitem_as_offset(item_bucket_index) 4677 value_type, value_offset = self._imapping.hashmap.getitem_as_offset(item_value_index) 4678 if self._pop: 4679 self._imapping.hashmap[field_type_index] = MutableMappingHashmapFieldTypes.tnone.value 4680 self._imapping.hashmap[item_hash_index] = None 4681 self._imapping.hashmap.setitem_as_offset(item_bucket_index, (InternalListFieldTypes.tnone.value, 0), False) 4682 self._imapping.hashmap.setitem_as_offset(item_value_index, (InternalListFieldTypes.tnone.value, 0), False) 4683 4684 self._index += 1 4685 return key_hash, key_type, key_offset, value_type, value_offset 4686 elif MutableMappingHashmapFieldTypes.tbucket.value == field_type: 4687 bucket_offset = self._imapping.hashmap[item_bucket_index] 4688 try: 4689 bucket = self._imapping.buckets[item_info_index] 4690 if bucket._offset != bucket_offset: 4691 raise KeyError 4692 except KeyError: 4693 raise 4694 self._imapping.buckets[item_info_index] = bucket = IList(self._imapping._shared_memory, bucket_offset) 4695 4696 bucket_len = len(bucket) 4697 sub_item_info_index = self._sub_index 4698 while (sub_item_info_index * len(MutableMappingBucketOffsets)) < bucket_len: 4699 sub_item_field_type_index = sub_item_info_index * len(MutableMappingBucketOffsets) + MutableMappingBucketOffsets.field_type.value 4700 if bucket[sub_item_field_type_index] == MutableMappingBucketFieldTypes.tnone.value: 4701 sub_item_info_index += 1 4702 continue 4703 4704 sub_item_hash_index = sub_item_info_index * len(MutableMappingBucketOffsets) + MutableMappingBucketOffsets.field_hash.value 4705 sub_item_key_obj_index = sub_item_info_index * len(MutableMappingBucketOffsets) + MutableMappingBucketOffsets.key_obj.value 4706 sub_item_value_obj_index = sub_item_info_index * len(MutableMappingBucketOffsets) + 
MutableMappingBucketOffsets.value_obj.value 4707 4708 key_hash = bucket[sub_item_hash_index] 4709 key_type, key_offset = bucket.getitem_as_offset(sub_item_key_obj_index) 4710 value_type, value_offset = bucket.getitem_as_offset(sub_item_value_obj_index) 4711 if self._pop: 4712 bucket[sub_item_field_type_index] = MutableMappingHashmapFieldTypes.tnone.value 4713 bucket[sub_item_hash_index] = None 4714 bucket.setitem_as_offset(sub_item_key_obj_index, (InternalListFieldTypes.tnone.value, 0), False) 4715 bucket.setitem_as_offset(sub_item_value_obj_index, (InternalListFieldTypes.tnone.value, 0), False) 4716 4717 self._sub_index += 1 4718 return key_hash, key_type, key_offset, value_type, value_offset 4719 else: 4720 self._sub_index = 0 4721 self._index += 1 4722 continue 4723 else: 4724 raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}') 4725 else: 4726 raise StopIteration 4727 4728 def __iter__(self): 4729 return self 4730 4731 4732class TMutableMapping: 4733 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: AbsMutableMapping) -> Tuple[IMutableMapping, Offset, Size]: 4734 obj: IMutableMapping = IMutableMapping(shared_memory, obj=obj) 4735 return obj, obj._offset, obj._obj_size 4736 4737 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> IMutableMapping: 4738 if ObjectType.tmutablemapping != read_uint64(shared_memory.base_address, offset): 4739 raise WrongObjectTypeError 4740 4741 return IMutableMapping(shared_memory, offset) 4742 4743 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 4744 if ObjectType.tmutablemapping != read_uint64(shared_memory.base_address, offset): 4745 raise WrongObjectTypeError 4746 4747 obj: IMutableMapping = IMutableMapping(shared_memory, offset) 4748 obj._free_mem() 4749 4750 4751# ====================================================================================================================== 4752# === General 
# Object ==============================================================================================================


class ForceGeneralObjectCopy:
    # Marker wrapper: asks put_obj() to map a pickled COPY of `obj`, leaving
    # the original Python object untouched.
    def __init__(self, obj: Any) -> None:
        self.obj = obj


# Convenience aliases.
FGeneralObjectCopy = ForceGeneralObjectCopy
forcegeneralobjectcopy = ForceGeneralObjectCopy
fgeneralobjectcopy = ForceGeneralObjectCopy


class ForceGeneralObjectInplace:
    # Marker wrapper: asks put_obj() to wrap `obj` ITSELF so its attribute
    # accesses are redirected into shared memory.
    def __init__(self, obj: Any) -> None:
        self.obj = obj


FGeneralObjectInplace = ForceGeneralObjectInplace
forcegeneralobjectinplace = ForceGeneralObjectInplace
fgeneralobjectinplace = ForceGeneralObjectInplace


class GeneralObjectOffsets(IntEnum):
    # Field layout (in machine words) of a tgeneralobject header.
    pickled_obj = 0
    obj_dict = 1
    setable_data_descriptor_field_names = 2


def tgeneralobject_custom_getattribute(self, name):
    # Attribute READ hook for wrapped objects: dunder names and the two
    # bookkeeping slots resolve normally; any other name is first looked up in
    # the shared-memory attribute mapping, then falls back to the instance.
    if name in {'_tgeneralobject_imutablemapping_attributes', '_tgeneralobject_setable_data_descriptor_field_names'} or name.startswith('__'):
        return object.__getattribute__(self, name)

    try:
        return self._tgeneralobject_imutablemapping_attributes[name]
    except KeyError:
        pass

    return object.__getattribute__(self, name)


def tgeneralobject_custom_setattr(self, name, value):
    # Attribute WRITE hook for wrapped objects: dunder names, the bookkeeping
    # slots, and function/method/frame/code/method-descriptor values stay on
    # the instance; plain data attributes go into the shared-memory mapping.
    if name in {'_tgeneralobject_imutablemapping_attributes', '_tgeneralobject_setable_data_descriptor_field_names'} or name.startswith('__'):
        object.__setattr__(self, name, value)
    else:
        if isfunction(value) or ismethod(value) or isinstance(value, FrameType) or isinstance(value, CodeType) or ismethoddescriptor(value):
            object.__setattr__(self, name, value)
            return

        self._tgeneralobject_imutablemapping_attributes[name] = value


# Attribute DELETE hook for wrapped objects (body on the next source line).
def tgeneralobject_custom_delattr(self, name):
    # Dunder names and the bookkeeping slots are deleted from the instance;
    # for everything else the shared-memory mapping is tried first and the
    # instance is the fallback.
    if name in {'_tgeneralobject_imutablemapping_attributes', '_tgeneralobject_setable_data_descriptor_field_names'} or name.startswith('__'):
        object.__delattr__(self, name)
    else:
        has_value_static: bool = False
        value_static = None
        try:
            # getattr_static avoids triggering descriptors/__getattribute__.
            value_static = getattr_static(self, name)
            has_value_static = True
        except AttributeError:
            pass

        deleted: bool = False
        try:
            # Functions/methods/frame/code objects live on the instance.
            # NOTE(review): `and` binds tighter than `or`, so only the
            # isfunction() test is actually guarded by has_value_static; the
            # remaining tests run on value_static == None when the static
            # lookup failed (harmless for None, but likely unintended).
            if has_value_static and isfunction(value_static) or ismethod(value_static) or isinstance(value_static, FrameType) or isinstance(value_static, CodeType) or ismethoddescriptor(value_static):
                object.__delattr__(self, name)
                return
        except AttributeError:
            pass

        try:
            # Data descriptors exposing __delete__ are deleted on the instance.
            if has_value_static and (not isclass(value_static)) and hasattr(value_static, "__delete__"):
                object.__delattr__(self, name)
                deleted = True
        except AttributeError:
            pass

        try:
            del self._tgeneralobject_imutablemapping_attributes[name]
            return
        except KeyError:
            pass

        if not deleted:
            object.__delattr__(self, name)


def tgeneralobject_wrap_obj(obj, mapped_obj_dict: IMutableMapping, setable_data_descriptor_field_names: Set[str], init_mapped_obj_dict: bool):
    # Replace obj's class with a dynamically created subclass whose attribute
    # hooks redirect plain data attributes into `mapped_obj_dict` (an
    # IMutableMapping living in shared memory). When `init_mapped_obj_dict`
    # is True, obj's current data attributes are copied into the mapping first.
    base = obj.__class__
    setattr(obj, '_tgeneralobject_imutablemapping_attributes', mapped_obj_dict)
    setattr(obj, '_tgeneralobject_setable_data_descriptor_field_names', setable_data_descriptor_field_names)
    if init_mapped_obj_dict:
        object_fields = set(dir(object))
        obj_fields = set(dir(obj)) - object_fields
        for key in obj_fields:
            value = getattr_static(obj, key)
            if key in {'_tgeneralobject_imutablemapping_attributes', '_tgeneralobject_setable_data_descriptor_field_names'} or key.startswith('__'):
                continue

            # Skip callables and frame/code objects — they stay on the class.
            if isfunction(value) or ismethod(value) or isinstance(value, FrameType) or isinstance(value, CodeType) or ismethoddescriptor(value):
                continue

            # Skip non-data descriptors (__get__ without __set__/__delete__).
            if (not isclass(value)) and (hasattr(value, "__get__") and (not (hasattr(value, "__set__") or hasattr(value, "__delete__")))):
                continue

            if is_setable_data_descriptor(value):
                setable_data_descriptor_field_names.add(key)

            mapped_obj_dict[key] = getattr(obj, key)

    NewClass = type(
        base.__name__ + 'WrappedByTGeneralObject',
        (base,),
        {
            '__getattribute__': tgeneralobject_custom_getattribute,
            '__setattr__': tgeneralobject_custom_setattr,
            '__delattr__': tgeneralobject_custom_delattr,
        }
    )
    obj.__class__ = NewClass


class TGeneralObject:
    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
        # Store a general object: header with three word-sized fields, a
        # pickled snapshot, and a shared dict that subsequently intercepts
        # attribute access via tgeneralobject_wrap_obj(). On any failure the
        # header and already-created sub-objects are released before re-raise.
        offset, real_size = shared_memory.malloc(ObjectType.tgeneralobject, bs * len(GeneralObjectOffsets))
        created_items_offsets: List[Offset] = list()
        try:
            make_changes_inplace: bool = True
            if isinstance(obj, ForceGeneralObjectCopy):
                obj = obj.obj
                make_changes_inplace = False
            elif isinstance(obj, ForceGeneralObjectInplace):
                obj = obj.obj
                make_changes_inplace = True

            dumped_obj: bytes = pickle_dumps(obj)
            dumped_mapped_obj_type, dumped_obj_offset, dumped_obj_type_size = shared_memory.put_obj(dumped_obj)
            created_items_offsets.append(dumped_obj_offset)
            mapped_obj_dict, obj_dict_offset, obj_dict_size = shared_memory.put_obj(dict())
            created_items_offsets.append(obj_dict_offset)

            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.pickled_obj, dumped_obj_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.obj_dict, obj_dict_offset)

            setable_data_descriptor_field_names: Set[str] = set()

            mapped_obj = None
            if make_changes_inplace:
                # Wrap the caller's object itself.
                tgeneralobject_wrap_obj(obj, mapped_obj_dict, setable_data_descriptor_field_names, True)
                mapped_obj = obj
            else:
                # Wrap a private copy restored from the pickled snapshot.
                # (continues on the next source line)
mapped_obj = self.init_from_shared_memory(shared_memory, offset) 4912 mapped_obj = pickle_loads(dumped_obj) 4913 tgeneralobject_wrap_obj(mapped_obj, mapped_obj_dict, setable_data_descriptor_field_names, True) 4914 4915 dumped_setable_data_descriptor_field_names: bytes = pickle_dumps(setable_data_descriptor_field_names) 4916 mapped_dumped_setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset, dumped_setable_data_descriptor_field_names_size = shared_memory.put_obj(dumped_setable_data_descriptor_field_names) 4917 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset) 4918 except: 4919 shared_memory.free(offset) 4920 for item_offset in created_items_offsets: 4921 shared_memory.destroy_obj(item_offset) 4922 4923 raise 4924 4925 return mapped_obj, offset, real_size 4926 4927 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any: 4928 if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset): 4929 raise WrongObjectTypeError 4930 4931 dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.pickled_obj) 4932 dumped_obj: bytes = shared_memory.get_obj(dumped_obj_offset) 4933 4934 obj_dict_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.obj_dict) 4935 mapped_obj_dict = shared_memory.get_obj(obj_dict_offset) 4936 obj = pickle_loads(dumped_obj) 4937 4938 dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.setable_data_descriptor_field_names) 4939 dumped_setable_data_descriptor_field_names = shared_memory.get_obj(dumped_setable_data_descriptor_field_names_offset) 4940 setable_data_descriptor_field_names = 
pickle_loads(dumped_setable_data_descriptor_field_names) 4941 4942 tgeneralobject_wrap_obj(obj, mapped_obj_dict, setable_data_descriptor_field_names, False) 4943 return obj 4944 4945 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 4946 if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset): 4947 raise WrongObjectTypeError 4948 4949 dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.pickled_obj) 4950 shared_memory.destroy_obj(dumped_obj_offset) 4951 obj_dict_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.obj_dict) 4952 if obj_dict_offset: 4953 shared_memory.destroy_obj(obj_dict_offset) 4954 4955 dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.setable_data_descriptor_field_names) 4956 shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset) 4957 shared_memory.free(offset) 4958 4959 # def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview: 4960 # if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 4961 # raise WrongObjectTypeError 4962 4963 # dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.pickled_obj) 4964 # return shared_memory.get_obj_buffer(dumped_obj_offset) 4965 4966 # def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]: 4967 # if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 4968 # raise WrongObjectTypeError 4969 4970 4971 # dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.pickled_obj) 4972 # return 
shared_memory.get_obj_buffer_2(dumped_obj_offset) 4973 4974 4975# ====================================================================================================================== 4976# === Static Object ============================================================================================================= 4977 4978 4979class ForceStaticObjectCopy: 4980 def __init__(self, obj: Any) -> None: 4981 self.obj = obj 4982 4983 4984FStaticObjectCopy = ForceStaticObjectCopy 4985forcestaticobjectcopy = ForceStaticObjectCopy 4986fstaticobjectcopy = ForceStaticObjectCopy 4987 4988 4989class ForceStaticObjectInplace: 4990 def __init__(self, obj: Any) -> None: 4991 self.obj = obj 4992 4993 4994FStaticObjectInplace = ForceStaticObjectInplace 4995forcestaticobjectinplace = ForceStaticObjectInplace 4996fstaticobjectinplace = ForceStaticObjectInplace 4997 4998 4999class StaticObjectOffsets(IntEnum): 5000 pickled_obj = 0 5001 pickled_attributes_dict = 1 5002 attributes_slots = 2 5003 setable_data_descriptor_field_names = 3 5004 5005 5006def tstaticobject_custom_getattribute(self, name): 5007 if name in {'_tstaticobject_attributes_dict', '_tstaticobject_attributes_slots', '_tstaticobject_setable_data_descriptor_field_names'} or name.startswith('__'): 5008 return object.__getattribute__(self, name) 5009 5010 try: 5011 return self._tstaticobject_attributes_slots[self._tstaticobject_attributes_dict[name]] 5012 except KeyError: 5013 pass 5014 5015 return object.__getattribute__(self, name) 5016 5017 5018def tstaticobject_custom_setattr(self, name, value): 5019 if name in {'_tstaticobject_attributes_dict', '_tstaticobject_attributes_slots', '_tstaticobject_setable_data_descriptor_field_names'} or name.startswith('__'): 5020 object.__setattr__(self, name, value) 5021 else: 5022 if isfunction(value) or ismethod(value) or isinstance(value, FrameType) or isinstance(value, CodeType) or ismethoddescriptor(value): 5023 object.__setattr__(self, name, value) 5024 return 5025 5026 # try: 
5027 # if name in self._tstaticobject_setable_data_descriptor_field_names: 5028 # object.__setattr__(self, name, value) 5029 # except AttributeError: 5030 # pass 5031 5032 try: 5033 self._tstaticobject_attributes_slots[self._tstaticobject_attributes_dict[name]] = value 5034 return 5035 except KeyError: 5036 pass 5037 5038 object.__setattr__(self, name, value) 5039 5040 5041def tstaticobject_custom_delattr(self, name): 5042 if name in {'_tstaticobject_attributes_dict', '_tstaticobject_attributes_slots', '_tstaticobject_setable_data_descriptor_field_names'} or name.startswith('__'): 5043 object.__delattr__(self, name) 5044 else: 5045 if name in self._tstaticobject_attributes_dict: 5046 raise AttributeError(f"'{type(self).__name__}' object attribute '{name}' is read-only") 5047 else: 5048 object.__delattr__(self, name) 5049 5050 5051def tstaticobject_wrap_obj(obj, attributes_dict: Dict, attributes_slots: IList, setable_data_descriptor_field_names: Set[str], init_mapped_attributes: bool): 5052 base = obj.__class__ 5053 setattr(obj, '_tstaticobject_attributes_dict', attributes_dict) 5054 setattr(obj, '_tstaticobject_attributes_slots', attributes_slots) 5055 setattr(obj, '_tstaticobject_setable_data_descriptor_field_names', setable_data_descriptor_field_names) 5056 if init_mapped_attributes: 5057 object_fields = set(dir(object)) 5058 obj_fields = set(dir(obj)) - object_fields 5059 good_fields: List[Hashable] = list() 5060 for key in obj_fields: 5061 value = getattr_static(obj, key) 5062 if key in {'_tstaticobject_attributes_dict', '_tstaticobject_attributes_slots', '_tstaticobject_setable_data_descriptor_field_names'} or key.startswith('__'): 5063 continue 5064 5065 if isfunction(value) or ismethod(value) or isinstance(value, FrameType) or isinstance(value, CodeType) or ismethoddescriptor(value): 5066 continue 5067 5068 if (not isclass(value)) and (hasattr(value, "__get__") and (not (hasattr(value, "__set__") or hasattr(value, "__delete__")))): 5069 continue 5070 5071 if 
is_setable_data_descriptor(value): 5072 setable_data_descriptor_field_names.add(key) 5073 5074 good_fields.append(key) 5075 5076 good_fields_len = len(good_fields) 5077 attributes_slots.set_capacity(good_fields_len) 5078 attributes_slots.extend_with(good_fields_len, 0) 5079 for index, key in enumerate(good_fields): 5080 attributes_dict[key] = index 5081 value = getattr(obj, key) 5082 attributes_slots[index] = value 5083 5084 NewClass = type( 5085 base.__name__ + 'WrappedByTStaticObject', 5086 (base,), 5087 { 5088 '__getattribute__': tstaticobject_custom_getattribute, 5089 '__setattr__': tstaticobject_custom_setattr, 5090 '__delattr__': tstaticobject_custom_delattr, 5091 } 5092 ) 5093 obj.__class__ = NewClass 5094 5095 5096class TStaticObject: 5097 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]: 5098 offset, real_size = shared_memory.malloc(ObjectType.tstaticobject, bs * len(StaticObjectOffsets)) 5099 created_items_offsets: List[Offset] = list() 5100 try: 5101 make_changes_inplace: bool = True 5102 if isinstance(obj, ForceStaticObjectCopy): 5103 obj = obj.obj 5104 make_changes_inplace = False 5105 elif isinstance(obj, ForceStaticObjectInplace): 5106 obj = obj.obj 5107 make_changes_inplace = True 5108 5109 dumped_obj: bytes = pickle_dumps(obj) 5110 dumped_mapped_obj, dumped_obj_offset, dumped_obj_size = shared_memory.put_obj(dumped_obj) 5111 created_items_offsets.append(dumped_obj_offset) 5112 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_obj, dumped_obj_offset) 5113 5114 attributes_dict: Dict = dict() 5115 5116 attributes_slots, attributes_slots_offset, attributes_slots_size = shared_memory.put_obj(list()) 5117 created_items_offsets.append(attributes_slots_offset) 5118 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.attributes_slots, attributes_slots_offset) 5119 5120 
setable_data_descriptor_field_names: Set[str] = set() 5121 5122 mapped_obj = None 5123 if make_changes_inplace: 5124 tstaticobject_wrap_obj(obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, True) 5125 mapped_obj = obj 5126 else: 5127 # mapped_obj = self.init_from_shared_memory(shared_memory, offset) 5128 mapped_obj = pickle_loads(dumped_obj) 5129 tstaticobject_wrap_obj(mapped_obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, True) 5130 5131 dumped_attributes_dict: bytes = pickle_dumps(attributes_dict) 5132 dumped_mapped_attributes_dict, dumped_attributes_dict_offset, dumped_attributes_dict_size = shared_memory.put_obj(dumped_attributes_dict) 5133 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_attributes_dict, dumped_attributes_dict_offset) 5134 5135 dumped_setable_data_descriptor_field_names: bytes = pickle_dumps(setable_data_descriptor_field_names) 5136 mapped_dumped_setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset, dumped_setable_data_descriptor_field_names_size = shared_memory.put_obj(dumped_setable_data_descriptor_field_names) 5137 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset) 5138 except: 5139 shared_memory.free(offset) 5140 for item_offset in created_items_offsets: 5141 shared_memory.destroy_obj(item_offset) 5142 5143 raise 5144 5145 return mapped_obj, offset, real_size 5146 5147 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any: 5148 if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset): 5149 raise WrongObjectTypeError 5150 5151 dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_obj) 5152 dumped_obj: bytes = 
shared_memory.get_obj(dumped_obj_offset) 5153 obj = pickle_loads(dumped_obj) 5154 5155 attributes_slots_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.attributes_slots) 5156 attributes_slots: IList = shared_memory.get_obj(attributes_slots_offset) 5157 5158 dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_attributes_dict) 5159 dumped_attributes_dict = shared_memory.get_obj(dumped_attributes_dict_offset) 5160 attributes_dict = pickle_loads(dumped_attributes_dict) 5161 5162 dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.setable_data_descriptor_field_names) 5163 dumped_setable_data_descriptor_field_names = shared_memory.get_obj(dumped_setable_data_descriptor_field_names_offset) 5164 setable_data_descriptor_field_names = pickle_loads(dumped_setable_data_descriptor_field_names) 5165 5166 tstaticobject_wrap_obj(obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, False) 5167 return obj 5168 5169 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 5170 if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset): 5171 raise WrongObjectTypeError 5172 5173 dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_obj) 5174 shared_memory.destroy_obj(dumped_obj_offset) 5175 attributes_slots_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.attributes_slots) 5176 shared_memory.destroy_obj(attributes_slots_offset) 5177 dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_attributes_dict) 5178 
shared_memory.destroy_obj(dumped_attributes_dict_offset) 5179 dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.setable_data_descriptor_field_names) 5180 shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset) 5181 shared_memory.free(offset) 5182 5183 # def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview: 5184 # if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 5185 # raise WrongObjectTypeError 5186 5187 # dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_obj) 5188 # return shared_memory.get_obj_buffer(dumped_obj_offset) 5189 5190 # def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]: 5191 # if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 5192 # raise WrongObjectTypeError 5193 5194 5195 # dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_obj) 5196 # return shared_memory.get_obj_buffer_2(dumped_obj_offset) 5197 5198 5199# ====================================================================================================================== 5200# === Static Object With Slots ============================================================================================================= 5201 5202 5203class StaticObjectWithSlotsOffsets(IntEnum): 5204 pickled_obj = 0 5205 pickled_attributes_dict = 1 5206 attributes_slots = 2 5207 setable_data_descriptor_field_names = 3 5208 5209 5210def tstaticobjectwithslots_custom_getattribute(self, name): 5211 if name in {'_tstaticobjectwithslots_attributes_dict', '_tstaticobjectwithslots_attributes_slots', '_tstaticobjectwithslots_setable_data_descriptor_field_names'} or 
name.startswith('__'): 5212 return object.__getattribute__(self, name) 5213 5214 try: 5215 return self._tstaticobjectwithslots_attributes_slots[self._tstaticobjectwithslots_attributes_dict[name]] 5216 except KeyError: 5217 pass 5218 5219 return object.__getattribute__(self, name) 5220 5221 5222def tstaticobjectwithslots_custom_setattr(self, name, value): 5223 if name in {'_tstaticobjectwithslots_attributes_dict', '_tstaticobjectwithslots_attributes_slots', '_tstaticobjectwithslots_setable_data_descriptor_field_names'} or name.startswith('__'): 5224 object.__setattr__(self, name, value) 5225 else: 5226 if isfunction(value) or ismethod(value) or isinstance(value, FrameType) or isinstance(value, CodeType) or ismethoddescriptor(value): 5227 object.__setattr__(self, name, value) 5228 return 5229 5230 # try: 5231 # if name in self._tstaticobjectwithslots_setable_data_descriptor_field_names: 5232 # object.__setattr__(self, name, value) 5233 # except AttributeError: 5234 # pass 5235 5236 try: 5237 self._tstaticobjectwithslots_attributes_slots[self._tstaticobjectwithslots_attributes_dict[name]] = value 5238 return 5239 except KeyError: 5240 pass 5241 5242 object.__setattr__(self, name, value) 5243 5244 5245def tstaticobjectwithslots_custom_delattr(self, name): 5246 if name in {'_tstaticobjectwithslots_attributes_dict', '_tstaticobjectwithslots_attributes_slots', '_tstaticobjectwithslots_setable_data_descriptor_field_names'} or name.startswith('__'): 5247 object.__delattr__(self, name) 5248 else: 5249 if name in self._tstaticobjectwithslots_attributes_dict: 5250 raise AttributeError(f"'{type(self).__name__}' object attribute '{name}' is read-only") 5251 else: 5252 object.__delattr__(self, name) 5253 5254 5255def tstaticobjectwithslots_custom_init(self, original, good_fields, attributes_dict, attributes_slots, setable_data_descriptor_field_names): 5256 setattr(self, '_tstaticobjectwithslots_attributes_dict', attributes_dict) 5257 setattr(self, 
'_tstaticobjectwithslots_attributes_slots', attributes_slots) 5258 setattr(self, '_tstaticobjectwithslots_setable_data_descriptor_field_names', setable_data_descriptor_field_names) 5259 for attr_name in good_fields: 5260 setattr(self, attr_name, getattr(original, attr_name)) 5261 5262 5263def tstaticobjectwithslots_custom_eq(self, other): 5264 parent_class = self.__class__.__bases__[0] 5265 if not isinstance(other, (type(self), parent_class)): 5266 return NotImplemented 5267 5268 for key in self._tstaticobjectwithslots_attributes_dict.keys(): 5269 if not hasattr(other, key): 5270 return False 5271 5272 if getattr(self, key) != getattr(other, key): 5273 return False 5274 5275 return True 5276 5277 5278def tstaticobjectwithslots_wrap_obj(obj, attributes_dict: Dict, attributes_slots: IList, setable_data_descriptor_field_names: Set[str], init_mapped_attributes: bool) -> Any: 5279 base = obj.__class__ 5280 5281 good_fields: List[Hashable] = list() 5282 if init_mapped_attributes: 5283 if hasattr(base, '__slots__'): 5284 obj_fields = base.__slots__ 5285 else: 5286 object_fields = set(dir(object)) 5287 obj_fields = set(dir(obj)) - object_fields 5288 5289 for key in obj_fields: 5290 value = getattr_static(obj, key) 5291 if key in {'_tstaticobjectwithslots_attributes_dict', '_tstaticobjectwithslots_attributes_slots', '_tstaticobjectwithslots_setable_data_descriptor_field_names'} or key.startswith('__'): 5292 continue 5293 5294 if isfunction(value) or ismethod(value) or isinstance(value, FrameType) or isinstance(value, CodeType) or ismethoddescriptor(value): 5295 continue 5296 5297 if (not isclass(value)) and (hasattr(value, "__get__") and (not (hasattr(value, "__set__") or hasattr(value, "__delete__")))): 5298 continue 5299 5300 if is_setable_data_descriptor(value): 5301 setable_data_descriptor_field_names.add(key) 5302 5303 good_fields.append(key) 5304 5305 good_fields_len = len(good_fields) 5306 attributes_slots.set_capacity(good_fields_len) 5307 
attributes_slots.extend_with(good_fields_len, 0) 5308 for index, key in enumerate(good_fields): 5309 attributes_dict[key] = index 5310 value = getattr(obj, key) 5311 attributes_slots[index] = value 5312 5313 NewClass = type( 5314 base.__name__ + 'WrappedByTStaticObjectWithSlots', 5315 (base,), 5316 { 5317 '__slots__': ['__dict__'], 5318 '__init__': tstaticobjectwithslots_custom_init, 5319 '__eq__': tstaticobjectwithslots_custom_eq, 5320 '__getattribute__': tstaticobjectwithslots_custom_getattribute, 5321 '__setattr__': tstaticobjectwithslots_custom_setattr, 5322 '__delattr__': tstaticobjectwithslots_custom_delattr, 5323 } 5324 ) 5325 5326 new_obj = NewClass(obj, good_fields, attributes_dict, attributes_slots, setable_data_descriptor_field_names) 5327 5328 return new_obj 5329 5330 5331class TStaticObjectWithSlots: 5332 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]: 5333 offset, real_size = shared_memory.malloc(ObjectType.tstaticobjectwithslots, bs * len(StaticObjectWithSlotsOffsets)) 5334 created_items_offsets: List[Offset] = list() 5335 try: 5336 dumped_obj: bytes = pickle_dumps(obj) 5337 dumped_mapped_obj, dumped_obj_offset, dumped_obj_size = shared_memory.put_obj(dumped_obj) 5338 created_items_offsets.append(dumped_obj_offset) 5339 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_obj, dumped_obj_offset) 5340 5341 attributes_dict: Dict = dict() 5342 5343 attributes_slots, attributes_slots_offset, attributes_slots_size = shared_memory.put_obj(list()) 5344 created_items_offsets.append(attributes_slots_offset) 5345 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.attributes_slots, attributes_slots_offset) 5346 5347 setable_data_descriptor_field_names: Set[str] = set() 5348 5349 mapped_obj = None 5350 loaded_obj = pickle_loads(dumped_obj) 5351 mapped_obj = 
tstaticobjectwithslots_wrap_obj(loaded_obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, True) 5352 5353 dumped_attributes_dict: bytes = pickle_dumps(attributes_dict) 5354 dumped_mapped_attributes_dict, dumped_attributes_dict_offset, dumped_attributes_dict_size = shared_memory.put_obj(dumped_attributes_dict) 5355 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_attributes_dict, dumped_attributes_dict_offset) 5356 5357 dumped_setable_data_descriptor_field_names: bytes = pickle_dumps(setable_data_descriptor_field_names) 5358 mapped_dumped_setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset, dumped_setable_data_descriptor_field_names_size = shared_memory.put_obj(dumped_setable_data_descriptor_field_names) 5359 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset) 5360 except: 5361 shared_memory.free(offset) 5362 for item_offset in created_items_offsets: 5363 shared_memory.destroy_obj(item_offset) 5364 5365 raise 5366 5367 return mapped_obj, offset, real_size 5368 5369 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any: 5370 if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset): 5371 raise WrongObjectTypeError 5372 5373 dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_obj) 5374 dumped_obj: bytes = shared_memory.get_obj(dumped_obj_offset) 5375 obj = pickle_loads(dumped_obj) 5376 5377 attributes_slots_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.attributes_slots) 5378 attributes_slots: IList = shared_memory.get_obj(attributes_slots_offset) 5379 5380 
dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_attributes_dict) 5381 dumped_attributes_dict = shared_memory.get_obj(dumped_attributes_dict_offset) 5382 attributes_dict = pickle_loads(dumped_attributes_dict) 5383 5384 dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names) 5385 dumped_setable_data_descriptor_field_names = shared_memory.get_obj(dumped_setable_data_descriptor_field_names_offset) 5386 setable_data_descriptor_field_names = pickle_loads(dumped_setable_data_descriptor_field_names) 5387 5388 mapped_obj = tstaticobjectwithslots_wrap_obj(obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, False) 5389 return mapped_obj 5390 5391 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 5392 if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset): 5393 raise WrongObjectTypeError 5394 5395 dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_obj) 5396 shared_memory.destroy_obj(dumped_obj_offset) 5397 attributes_slots_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.attributes_slots) 5398 shared_memory.destroy_obj(attributes_slots_offset) 5399 dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_attributes_dict) 5400 shared_memory.destroy_obj(dumped_attributes_dict_offset) 5401 dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names) 5402 
shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset) 5403 shared_memory.free(offset) 5404 5405 # def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview: 5406 # if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 5407 # raise WrongObjectTypeError 5408 5409 # dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_obj) 5410 # return shared_memory.get_obj_buffer(dumped_obj_offset) 5411 5412 # def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]: 5413 # if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 5414 # raise WrongObjectTypeError 5415 5416 5417 # dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_obj) 5418 # return shared_memory.get_obj_buffer_2(dumped_obj_offset) 5419 5420 5421# ====================================================================================================================== 5422# === Numpy ndarray ============================================================================================================= 5423 5424 5425class TNumpyNdarrayOffsets(IntEnum): 5426 data_buffer_offset = 0 5427 shape_tuple_offset = 1 5428 pickled_datatype_offset = 2 5429 5430 5431class TNumpyNdarray: 5432 def map_to_shared_memory(self, shared_memory: 'SharedMemory', nparray: np.ndarray) -> Tuple[np.ndarray, Offset, Size]: 5433 shape = tuple(nparray.shape) 5434 data_type = nparray.dtype 5435 pickled_data_type = pickle_dumps(data_type) 5436 data_buffer: bytes = nparray.tobytes() 5437 offset, real_size = shared_memory.malloc(ObjectType.tnumpyndarray, bs * len(TNumpyNdarrayOffsets)) 5438 created_items_offsets: List[Offset] = list() 5439 try: 5440 data_buffer_mapped_obj, data_buffer_offset, 
data_buffer_size = shared_memory.put_obj(data_buffer) 5441 created_items_offsets.append(data_buffer_offset) 5442 shape_mapped_obj, shape_offset, shape_size = shared_memory.put_obj(shape) 5443 created_items_offsets.append(shape_offset) 5444 pickled_data_type_mapped_obj, pickled_data_type_offset, pickled_data_type_size = shared_memory.put_obj(pickled_data_type) 5445 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.data_buffer_offset, data_buffer_offset) 5446 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.shape_tuple_offset, shape_offset) 5447 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.pickled_datatype_offset, pickled_data_type_offset) 5448 mapped_nparray: np.ndarray = make_numpy_array_from_obj_offset(shared_memory, data_buffer_offset, shape, data_type) 5449 except: 5450 shared_memory.free(offset) 5451 for item_offset in created_items_offsets: 5452 shared_memory.destroy_obj(item_offset) 5453 5454 raise 5455 5456 return mapped_nparray, offset, real_size 5457 5458 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> dict: 5459 if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset): 5460 raise WrongObjectTypeError 5461 5462 data_buffer_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.data_buffer_offset) 5463 shape_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.shape_tuple_offset) 5464 pickled_data_type_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.pickled_datatype_offset) 5465 shape = shared_memory.get_obj(shape_offset) 5466 pickled_data_type = shared_memory.get_obj(pickled_data_type_offset) 5467 data_type = pickle_loads(pickled_data_type) 5468 mapped_nparray: 
np.ndarray = make_numpy_array_from_obj_offset(shared_memory, data_buffer_offset, shape, data_type) 5469 return mapped_nparray 5470 5471 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 5472 if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset): 5473 raise WrongObjectTypeError 5474 5475 data_buffer_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.data_buffer_offset) 5476 shape_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.shape_tuple_offset) 5477 pickled_data_type_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.pickled_datatype_offset) 5478 shared_memory.destroy_obj(data_buffer_offset) 5479 shared_memory.destroy_obj(shape_offset) 5480 shared_memory.destroy_obj(pickled_data_type_offset) 5481 shared_memory.free(offset) 5482 5483 def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview: 5484 if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 5485 raise WrongObjectTypeError 5486 5487 data_buffer_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.data_buffer_offset) 5488 return shared_memory.get_obj_buffer(data_buffer_offset) 5489 5490 def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]: 5491 if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 5492 raise WrongObjectTypeError 5493 5494 5495 data_buffer_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.data_buffer_offset) 5496 return shared_memory.get_obj_buffer_2(data_buffer_offset) 5497 5498 5499# 
# ======================================================================================================================
# === Torch Tensor =====================================================================================================
# (banner previously read "Numpy ndarray" — copy-paste slip; this section holds the torch.Tensor codec)


class TTorchTensorOffsets(IntEnum):
    # Header-relative slot storing the offset of the backing numpy ndarray object.
    numpy_ndarray_offset = 0


class TTorchTensor:
    """Codec storing a ``torch.Tensor`` in shared memory by delegating to the ndarray codec."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', tensor: Tensor) -> Tuple[Tensor, Offset, Size]:
        """Copy ``tensor`` into shared memory; return (zero-copy mapped tensor, offset, allocated size)."""
        offset, real_size = shared_memory.malloc(ObjectType.ttorchtensor, bs * len(TTorchTensorOffsets))
        created_items_offsets: List[Offset] = list()
        try:
            numpy_ndarray_mapped_obj, numpy_ndarray_offset, numpy_ndarray_size = shared_memory.put_obj(tensor.numpy())
            created_items_offsets.append(numpy_ndarray_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TTorchTensorOffsets.numpy_ndarray_offset, numpy_ndarray_offset)
            mapped_torch_tensor: Tensor = from_numpy(numpy_ndarray_mapped_obj)
        except:
            # Roll back every sub-object created so far, then re-raise.
            # Fix: removed stray `self._offset = None` — `_offset` is never
            # defined or read anywhere in this class (leftover from another codec).
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)

            raise
        return mapped_torch_tensor, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Tensor:
        """Rebuild a zero-copy tensor view over a tensor previously stored at ``offset``.

        Raises:
            WrongObjectTypeError: object at ``offset`` is not a ``ttorchtensor``.
        """
        # Fix: explicit type-tag slot, consistent with buffer()/buffer_2() below.
        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TTorchTensorOffsets.numpy_ndarray_offset)
        numpy_ndarray_mapped_obj: np.ndarray = shared_memory.get_obj(numpy_ndarray_offset)
        mapped_torch_tensor: Tensor = from_numpy(numpy_ndarray_mapped_obj)
        return mapped_torch_tensor

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Destroy the stored tensor: its backing ndarray object, then the header itself.

        Raises:
            WrongObjectTypeError: object at ``offset`` is not a ``ttorchtensor``.
        """
        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TTorchTensorOffsets.numpy_ndarray_offset)
        shared_memory.destroy_obj(numpy_ndarray_offset)
        shared_memory.free(offset)

    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
        """Return a memoryview over the backing ndarray object's buffer.

        Raises:
            WrongObjectTypeError: object at ``offset`` is not a ``ttorchtensor``.
        """
        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TTorchTensorOffsets.numpy_ndarray_offset)
        return shared_memory.get_obj_buffer(numpy_ndarray_offset)

    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
        """Return the (offset, size) pair of the backing ndarray object's buffer.

        Raises:
            WrongObjectTypeError: object at ``offset`` is not a ``ttorchtensor``.
        """
        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TTorchTensorOffsets.numpy_ndarray_offset)
        return shared_memory.get_obj_buffer_2(numpy_ndarray_offset)


# ======================================================================================================================
# === Types and Codecs =================================================================================================


# Add your own codecs to `codec_by_type`
codec_by_type: Dict[ObjectType, TBase] = {
    ObjectType.tnone: TNone(),
    ObjectType.tint: TInt(),
    ObjectType.tbool: TBool(),
    ObjectType.tfloat: TFloat(),
    ObjectType.tcomplex: TComplex(),
    ObjectType.tdecimal: TDecimal(),
    ObjectType.tdatetime: TDatetime(),
    ObjectType.tslice: TSlice(),
    ObjectType.tbytes: TBytes(),
    ObjectType.tbytearray: TBytearray(),
    ObjectType.tstr: TStr(),
    ObjectType.tlist: TList(),
    ObjectType.ttuple: TTuple(),
    ObjectType.tmutableset: TMutableSet(),
    ObjectType.tset: TSet(),
    ObjectType.tmutablemapping: TMutableMapping(),
    ObjectType.tmapping: TMapping(),
    ObjectType.tfastset: TFastSet(),
    ObjectType.tfastdict: TFastDict(),
    ObjectType.tsmallint: TSmallInt(),
    ObjectType.tbigint: TBigInt(),
    ObjectType.tgeneralobject: TGeneralObject(),
    # `tpickable` deliberately shares the general-object codec implementation.
    ObjectType.tpickable: TGeneralObject(),
    ObjectType.tstaticobject: TStaticObject(),
    ObjectType.tstaticobjectwithslots: TStaticObjectWithSlots(),
    ObjectType.tnumpyndarray: TNumpyNdarray(),
    ObjectType.ttorchtensor: TTorchTensor(),
}
# Add your own types to `obj_type_map`
obj_type_map: Dict[Type, ObjectType] = {
}


# ======================================================================================================================
# === Message ==========================================================================================================


class MessageOffsets(IntEnum):
    # Word-indexed layout of a queue message header in shared memory.
    previous_message_offset = 0
    next_message_offset = 1
    item_offset = 2


class SharedMemory:
    def __init__(self, name: str, create: bool = False, size: Optional[int] = None, queue_type: QueueType = QueueType.fifo, zero_mem: bool = True,
                 consumer_id: Optional[int] = None, creator_destroy_timeout: float = 5.0, unlink_old: bool = True):
        """Create (creator side) or prepare to attach (consumer side) a named shared-memory region.

        Args:
            name: OS-level shared memory block name.
            create: True for the creator process; False for a consumer
                (a consumer must then call ``init_consumer``/``ainit_consumer``).
            size: total region size in bytes; ``None``/0 means "system area only"
                for the creator, or "read the size from the creator" for a consumer.
            queue_type: message queue discipline.
            zero_mem: zero the data area on creation.
            consumer_id: optional identifier of the consumer process.
            creator_destroy_timeout: how long the creator waits for the consumer
                to close before tearing the region down.
            unlink_old: unlink a stale region left by a crashed previous run.
        """
        global current_shared_memory_instance
        current_shared_memory_instance = self
        self._initiated: bool = False
        self._consumer_id: Optional[int] = consumer_id
        self._creator_destroy_timeout: float = creator_destroy_timeout
        self.offset_to_be_monitored: Optional[Offset] = None
        self._malloc_time: float = 0.0
        self._realloc_time: float = 0.0
        self._name: str = name
        self._create: bool = create
        self._queue_type: QueueType = queue_type
        self._zero_mem: bool = zero_mem
        self._last_message_offset: Optional[Offset] = None
        # Callable returning an awaitable; used by the await_* polling helpers.
        self._asleep_func: Coroutine = self._default_asleep_func

        sys_arr_length = len(SysValuesOffsets)
        self.global_sys_array_len: int = sys_arr_length
        arr_byte_size = sys_arr_length * bs
        self.global_sys_area_size: int = arr_byte_size

        # `size or None` maps 0 to None: "unknown until the creator tells us".
        self._size: Optional[int] = size or None
        if (size is None) or (0 == size):
            size = self.global_sys_area_size
            if self._create:
                self._size = size

        # NOTE(review): the nesting below was reconstructed — the original
        # indentation was lost. The creator-only grouping is inferred from the
        # fact that writing the sys values from a consumer would corrupt the
        # creator's state, and that consumers attach later via init_consumer().
        if self._create:
            if unlink_old:
                # `multiprocessing.SharedMemory` needs this cleanup when the
                # previous run was terminated unexpectedly.
                SharedMemory.unlink_by_name(name)

            self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=name, create=create, size=size)
            self._init_post_mem()

            # Initialize the system-values header.
            write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.total_mem_size, self._size)
            write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.data_start_offset, sys_arr_length * bs)
            write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.data_size, self._size - arr_byte_size)
            write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.data_end_offset, self._size)
            write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.free_memory_search_start, sys_arr_length * bs)
            write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.first_message_offset, 0)
            write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.last_message_offset, 0)
            write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_in_charge, 0)
            write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_in_charge, 0)
            write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_wants_to_be_in_charge, 0)
            write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_wants_to_be_in_charge, 0)
            write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_ready, 0)
            write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_ready, 0)

            self.free_memory_search_start = self.read_free_memory_search_start()
            data_size: int = self.get_data_size()
            if self._zero_mem:
                zero_memory(self.base_address, self.free_memory_search_start, data_size)

            # The whole data area starts life as one big free-memory block.
            write_uint64(self.base_address, self.free_memory_search_start + bs * BaseObjOffsets.obj_type, ObjectType.tfree_memory.value)
            write_uint64(self.base_address, self.free_memory_search_start + bs * BaseObjOffsets.obj_size, data_size - bs * len(BaseObjOffsets))

            self.set_creator_ready()

            self.get_data_end_offset()
            if self._create:
                self._initiated = True

        full_memory_barrier()

    async def _default_asleep_func(self):
        """Default cooperative yield used by the await_* polling helpers."""
        await asyncio.sleep(0)

    @property
    def size(self) -> int:
        # Total region size in bytes (None for a consumer until attached).
        return self._size

    @property
    def name(self) -> str:
        # OS-level shared memory block name.
        return self._name

    @property
    def create(self) -> bool:
        # True on the creator side, False on the consumer side.
        return self._create

    def _init_post_mem(self):
        """Compute the raw base address of the mapped buffer for the C-level read/write helpers."""
        self.base_address = ctypes.addressof(ctypes.c_char.from_buffer(self._shared_memory.buf))
        self.sys_values_offset = 0

    def init_consumer(self, time_limit: Optional[RationalNumber] = None) -> bool:
        """Attach the consumer side, waiting for the creator if needed.

        Args:
            time_limit: max seconds to wait for the region to appear; ``None`` waits forever.

        Returns:
            True on success (or if already attached), False on timeout.
        """
        if self._initiated:
            # Fix: was a bare `return` (None) despite the declared `-> bool`.
            return True

        if not self.wait_shared_memory_ready(time_limit):
            return False

        # Attach with whatever size we know; at minimum the system area is mappable.
        if (self._size is None) or (0 == self._size):
            size: int = self.global_sys_area_size
        else:
            size = self._size

        self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=size)
        self._init_post_mem()
        self.wait_creator_ready()

        if self._size is None:
            # The creator knows the real size: re-map with the full length.
            self._size = read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.total_mem_size)
            self._shared_memory.close()
            self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=self._size)

        self._init_post_mem()
        self.free_memory_search_start = self.read_free_memory_search_start()

        self.set_consumer_ready()

        self.get_data_end_offset()
        self._initiated = True
        full_memory_barrier()
        # Fix: explicit success result matching the `-> bool` annotation.
        return True
self.wait_shared_memory_ready(time_limit): 5712 return False 5713 5714 if (self._size is None) or (0 == self._size): 5715 size: int = self.global_sys_area_size 5716 else: 5717 size = self._size 5718 5719 self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=size) 5720 self._init_post_mem() 5721 self.wait_creator_ready() 5722 5723 if self._size is None: 5724 self._size = read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.total_mem_size) 5725 self._shared_memory.close() 5726 self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=self._size) 5727 5728 self._init_post_mem() 5729 self.free_memory_search_start = self.read_free_memory_search_start() 5730 5731 self.set_consumer_ready() 5732 5733 # print(bytes(self._shared_memory.buf[0:120])) 5734 self.get_data_end_offset() 5735 self._initiated = True 5736 full_memory_barrier() 5737 5738 async def ainit_consumer(self, time_limit: Optional[RationalNumber] = None) -> bool: 5739 if self._initiated: 5740 return 5741 5742 if not await self.await_shared_memory_ready(time_limit): 5743 return False 5744 5745 if (self._size is None) or (0 == self._size): 5746 size: int = self.global_sys_area_size 5747 else: 5748 size = self._size 5749 5750 self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=size) 5751 self._init_post_mem() 5752 await self.await_creator_ready(time_limit) 5753 5754 if self._size is None: 5755 self._size = read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.total_mem_size) 5756 self._shared_memory.close() 5757 self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=self._size) 5758 5759 self._init_post_mem() 5760 self.free_memory_search_start = self.read_free_memory_search_start() 5761 5762 
self.set_consumer_ready() 5763 5764 # print(bytes(self._shared_memory.buf[0:120])) 5765 self.get_data_end_offset() 5766 self._initiated = True 5767 full_memory_barrier() 5768 5769 def close_consumer(self): 5770 self.set_consumer_closed() 5771 full_memory_barrier() 5772 5773 def __enter__(self): 5774 return self 5775 5776 def __exit__(self, exc_type, exc_value, traceback): 5777 self.proper_close() 5778 5779 async def __aenter__(self): 5780 return self 5781 5782 async def __aexit__(self, exc_type, exc_value, traceback): 5783 await self.aproper_close() 5784 5785 def close(self): 5786 self._shared_memory.close() 5787 if self._create: 5788 self._shared_memory.unlink() 5789 SharedMemory.unlink_by_name(self._name) 5790 else: 5791 if 'posix' == os.name: 5792 try: 5793 from multiprocessing import resource_tracker 5794 shm_name = f'/{self._name}' 5795 resource_tracker.unregister(shm_name, "shared_memory") 5796 except FileNotFoundError: 5797 pass 5798 5799 def proper_close(self): 5800 if self._create: 5801 self.wait_consumer_closed(self._creator_destroy_timeout) 5802 else: 5803 self.close_consumer() 5804 5805 self.close() 5806 5807 async def aproper_close(self): 5808 if self._create: 5809 await self.await_consumer_closed(self._creator_destroy_timeout) 5810 else: 5811 self.close_consumer() 5812 5813 self.close() 5814 5815 @staticmethod 5816 def unlink_by_name(shared_memory_name: str): 5817 """`multiprocessing.SharedMemory` requires this cleanup in order to handle the case 5818 when the previous run of the program was terminated unexpectedly 5819 5820 Args: 5821 shared_memory_name (str): _description_ 5822 """ 5823 if 'posix' == os.name: 5824 try: 5825 import _posixshmem 5826 from multiprocessing import resource_tracker 5827 shm_name = f'/{shared_memory_name}' 5828 _posixshmem.shm_unlink(shm_name) 5829 resource_tracker.unregister(shm_name, "shared_memory") 5830 except FileNotFoundError: 5831 pass 5832 5833 @property 5834 def buf(self): 5835 """A memoryview of contents of the 
shared memory block. 5836 5837 Returns: 5838 _type_: _description_ 5839 """ 5840 return self._shared_memory.buf 5841 5842 def mem_view(self, offset: Offset, size: Size) -> memoryview: 5843 return self._shared_memory.buf[offset:offset + size] 5844 5845 def read_mem(self, offset: Offset, size: Size) -> List[int]: 5846 result = list() 5847 for i in range(size): 5848 result.append(read_uint8(self.base_address, offset + i)) 5849 5850 return result 5851 5852 def print_mem(self, offset: Offset, size: Size, text: str = None): 5853 result = list() 5854 for i in range(size): 5855 result.append(read_uint8(self.base_address, offset + i)) 5856 5857 if text: 5858 print(f'{text.format(offset)}: {result}') 5859 else: 5860 print(f'{result}') 5861 5862 def set_creator_ready(self): 5863 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_ready, 1) 5864 5865 def set_consumer_ready(self): 5866 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_ready, 1) 5867 5868 def set_consumer_closed(self): 5869 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_ready, 0) 5870 5871 def get_creator_ready(self): 5872 return read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_ready) 5873 5874 def get_consumer_ready(self): 5875 return read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_ready) 5876 5877 def wait_shared_memory_ready(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool: 5878 start_time = cpu_clock() 5879 shared_memory: MultiprocessingSharedMemory = None 5880 while True: 5881 try: 5882 shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=False) 5883 except FileNotFoundError as ex: 5884 if time_limit is not None: 5885 if (cpu_clock() - start_time) > time_limit: 5886 return False 5887 5888 if 
periodic_sleep_time is None: 5889 continue 5890 else: 5891 sleep(periodic_sleep_time) 5892 finally: 5893 if shared_memory is not None: 5894 shared_memory.close() 5895 return True 5896 5897 return False 5898 5899 async def await_shared_memory_ready(self, time_limit: Optional[RationalNumber] = None) -> bool: 5900 start_time = cpu_clock() 5901 shared_memory: MultiprocessingSharedMemory = None 5902 while True: 5903 try: 5904 shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=False) 5905 except FileNotFoundError as ex: 5906 if time_limit is not None: 5907 if (cpu_clock() - start_time) > time_limit: 5908 return False 5909 5910 await self._asleep_func() 5911 finally: 5912 if shared_memory is not None: 5913 shared_memory.close() 5914 return True 5915 5916 return False 5917 5918 def wait_creator_ready(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool: 5919 if self._create: 5920 return 5921 5922 start_time = cpu_clock() 5923 full_memory_barrier() 5924 while not read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_ready): 5925 if time_limit is not None: 5926 if (cpu_clock() - start_time) > time_limit: 5927 return False 5928 5929 if periodic_sleep_time is None: 5930 mm_pause() 5931 else: 5932 hps_sleep(periodic_sleep_time) 5933 5934 full_memory_barrier() 5935 5936 async def await_creator_ready(self, time_limit: Optional[RationalNumber] = None) -> bool: 5937 if self._create: 5938 return 5939 5940 start_time = cpu_clock() 5941 full_memory_barrier() 5942 while not read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_ready): 5943 if time_limit is not None: 5944 if (cpu_clock() - start_time) > time_limit: 5945 return False 5946 5947 await self._asleep_func() 5948 5949 full_memory_barrier() 5950 5951 def wait_consumer_ready(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: 
Optional[RationalNumber] = 0.000000001) -> bool: 5952 if not self._create: 5953 return 5954 5955 start_time = cpu_clock() 5956 full_memory_barrier() 5957 while not read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_ready): 5958 if time_limit is not None: 5959 if (cpu_clock() - start_time) > time_limit: 5960 return False 5961 5962 if periodic_sleep_time is None: 5963 mm_pause() 5964 else: 5965 hps_sleep(periodic_sleep_time) 5966 5967 full_memory_barrier() 5968 5969 async def await_consumer_ready(self, time_limit: Optional[RationalNumber] = None) -> bool: 5970 if not self._create: 5971 return 5972 5973 start_time = cpu_clock() 5974 full_memory_barrier() 5975 while not read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_ready): 5976 if time_limit is not None: 5977 if (cpu_clock() - start_time) > time_limit: 5978 return False 5979 5980 await self._asleep_func() 5981 5982 full_memory_barrier() 5983 5984 def wait_consumer_closed(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool: 5985 if not self._create: 5986 return 5987 5988 start_time = cpu_clock() 5989 full_memory_barrier() 5990 while read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_ready): 5991 if time_limit is not None: 5992 if (cpu_clock() - start_time) > time_limit: 5993 return False 5994 5995 if periodic_sleep_time is None: 5996 mm_pause() 5997 else: 5998 hps_sleep(periodic_sleep_time) 5999 6000 full_memory_barrier() 6001 6002 async def await_consumer_closed(self, time_limit: Optional[RationalNumber] = None) -> bool: 6003 if not self._create: 6004 return 6005 6006 start_time = cpu_clock() 6007 full_memory_barrier() 6008 while read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_ready): 6009 if time_limit is not None: 6010 if (cpu_clock() - start_time) > time_limit: 6011 return False 6012 6013 await 
self._asleep_func() 6014 6015 full_memory_barrier() 6016 6017 def creator_in_charge(self) -> bool: 6018 return read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_in_charge) 6019 6020 def consumer_in_charge(self) -> bool: 6021 return read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_in_charge) 6022 6023 def creator_wants_to_be_in_charge(self) -> bool: 6024 return read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_wants_to_be_in_charge) 6025 6026 def consumer_wants_to_be_in_charge(self) -> bool: 6027 return read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_wants_to_be_in_charge) 6028 6029 def read_free_memory_search_start(self) -> int: 6030 # return self.get_data_start_offset() 6031 return read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.free_memory_search_start) 6032 6033 def update_free_memory_search_start(self) -> int: 6034 self.free_memory_search_start = self.read_free_memory_search_start() 6035 6036 def get_free_memory_search_start(self) -> int: 6037 # self.update_free_memory_search_start() 6038 return self.free_memory_search_start 6039 6040 def write_free_memory_search_start(self, offset: Offset) -> int: 6041 # return 6042 if ((self.get_data_end_offset() - bs * len(BaseObjOffsets)) < offset) or (offset < self.get_data_start_offset()): 6043 offset = self.get_data_start_offset() 6044 6045 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.free_memory_search_start, offset) 6046 6047 def commit_free_memory_search_start(self): 6048 self.write_free_memory_search_start(self.free_memory_search_start) 6049 6050 def set_free_memory_search_start(self, offset: Offset) -> int: 6051 # return 6052 if ((self.get_data_end_offset() - bs * len(BaseObjOffsets)) < offset) or (offset < self.get_data_start_offset()): 6053 offset = self.get_data_start_offset() 6054 6055 
self.free_memory_search_start = offset 6056 # self.commit_free_memory_search_start() 6057 6058 def get_last_message_offset(self) -> Optional[Offset]: 6059 return read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.last_message_offset) 6060 6061 def set_last_message_offset(self, offset: Offset): 6062 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.last_message_offset, offset) 6063 6064 def get_first_message_offset(self) -> Optional[Offset]: 6065 return read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.first_message_offset) 6066 6067 def set_first_message_offset(self, offset: Offset): 6068 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.first_message_offset, offset) 6069 6070 def get_data_start_offset(self) -> Offset: 6071 return read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.data_start_offset) 6072 6073 def get_data_size(self) -> Size: 6074 return read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.data_size) 6075 6076 def get_data_end_offset(self) -> Offset: 6077 result = read_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.data_end_offset) 6078 if result != len(self._shared_memory.buf): 6079 print(result, len(self._shared_memory.buf)) 6080 6081 return result 6082 6083 # def read_uint64(self, offset: Offset) -> int: 6084 # return read_uint64(self.base_address, offset) 6085 6086 # def write_uint64(self, offset: Offset, value: int): 6087 # write_uint64(self.base_address, offset, value) 6088 6089 def read_uint64(self, offset: Offset) -> int: 6090 return int.from_bytes(self._shared_memory.buf[offset:offset + 8], byteorder='little', signed=False) 6091 6092 def write_uint64(self, offset: Offset, value: int): 6093 self._shared_memory.buf[offset:offset + 8] = value.to_bytes(8, byteorder='little', signed=False) 6094 6095 # def read_uint32(self, offset: Offset) -> int: 6096 # 
return int.from_bytes(self._shared_memory.buf[offset:offset + 4], byteorder='little', signed=False) 6097 6098 # def write_uint32(self, offset: Offset, value: int): 6099 # self._shared_memory.buf[offset:offset + 4] = value.to_bytes(4, byteorder='little', signed=False) 6100 6101 # def read_uint16(self, offset: Offset) -> int: 6102 # return int.from_bytes(self._shared_memory.buf[offset:offset + 2], byteorder='little', signed=False) 6103 6104 # def write_uint16(self, offset: Offset, value: int): 6105 # self._shared_memory.buf[offset:offset + 2] = value.to_bytes(2, byteorder='little', signed=False) 6106 6107 # def read_uint8(self, offset: Offset) -> int: 6108 # return int.from_bytes(self._shared_memory.buf[offset:offset + 1], byteorder='little', signed=False) 6109 6110 # def write_uint8(self, offset: Offset, value: int): 6111 # self._shared_memory.buf[offset:offset + 1] = value.to_bytes(1, byteorder='little', signed=False) 6112 6113 # def read_int64(self, offset: Offset) -> int: 6114 # return int.from_bytes(self._shared_memory.buf[offset:offset + 8], byteorder='little', signed=True) 6115 6116 # def write_int64(self, offset: Offset, value: int): 6117 # self._shared_memory.buf[offset:offset + 8] = value.to_bytes(8, byteorder='little', signed=True) 6118 6119 # def read_int32(self, offset: Offset) -> int: 6120 # return int.from_bytes(self._shared_memory.buf[offset:offset + 4], byteorder='little', signed=True) 6121 6122 # def write_int32(self, offset: Offset, value: int): 6123 # self._shared_memory.buf[offset:offset + 4] = value.to_bytes(4, byteorder='little', signed=True) 6124 6125 # def read_int16(self, offset: Offset) -> int: 6126 # return int.from_bytes(self._shared_memory.buf[offset:offset + 2], byteorder='little', signed=True) 6127 6128 # def write_int16(self, offset: Offset, value: int): 6129 # self._shared_memory.buf[offset:offset + 2] = value.to_bytes(2, byteorder='little', signed=True) 6130 6131 # def read_int8(self, offset: Offset) -> int: 6132 # return 
int.from_bytes(self._shared_memory.buf[offset:offset + 1], byteorder='little', signed=True) 6133 6134 # def write_int8(self, offset: Offset, value: int): 6135 # self._shared_memory.buf[offset:offset + 1] = value.to_bytes(1, byteorder='little', signed=True) 6136 6137 # def read_float(self, offset: Offset) -> float: 6138 # return float.from_bytes(self._shared_memory.buf[offset:offset + 4], byteorder='little', signed=False) 6139 6140 # def write_float(self, offset: Offset, value: float): 6141 # self._shared_memory.buf[offset:offset + 4] = value.to_bytes(4, byteorder='little', signed=False) 6142 6143 # def read_double(self, offset: Offset) -> float: 6144 # return float.from_bytes(self._shared_memory.buf[offset:offset + 8], byteorder='little', signed=False) 6145 6146 # def write_double(self, offset: Offset, value: float): 6147 # self._shared_memory.buf[offset:offset + 8] = value.to_bytes(8, byteorder='little', signed=False) 6148 6149 # def read_complex(self, offset: Offset) -> complex: 6150 # return complex.from_bytes(self._shared_memory.buf[offset:offset + 16], byteorder='little', signed=False) 6151 6152 # def write_complex(self, offset: Offset, value: complex): 6153 # self._shared_memory.buf[offset:offset + 16] = value.to_bytes(16, byteorder='little', signed=False) 6154 6155 # def read_bool(self, offset: Offset) -> bool: 6156 # return bool.from_bytes(self._shared_memory.buf[offset:offset + 1], byteorder='little', signed=False) 6157 6158 # def write_bool(self, offset: Offset, value: bool): 6159 # self._shared_memory.buf[offset:offset + 1] = value.to_bytes(1, byteorder='little', signed=False) 6160 6161 # def read_str(self, offset: Offset) -> str: 6162 # size = read_uint64(self.base_address, offset) 6163 # return self._shared_memory.buf[offset + 8:offset + 8 + size].decode() 6164 6165 # def read_str_2(self, offset: Offset, size: Size) -> str: 6166 # return self._shared_memory.buf[offset + 8:offset + 8 + size].decode() 6167 6168 # def write_str(self, offset: Offset, 
value: str): 6169 # size = len(value) 6170 # write_uint64(self.base_address, offset, size) 6171 # self._shared_memory.buf[offset + 8:offset + 8 + size] = value.encode() 6172 6173 # def read_bytes(self, offset: Offset) -> bytes: 6174 # size = read_uint64(self.base_address, offset) 6175 # return self._shared_memory.buf[offset + 8:offset + 8 + size] 6176 6177 # def read_bytes_2(self, offset: Offset, size: Size) -> bytes: 6178 # return self._shared_memory.buf[offset + 8:offset + 8 + size] 6179 6180 # def write_bytes(self, offset: Offset, value: bytes): 6181 # size = len(value) 6182 # write_uint64(self.base_address, offset, size) 6183 # self._shared_memory.buf[offset + 8:offset + 8 + size] = value 6184 6185 # def read_bytearray(self, offset: Offset) -> bytearray: 6186 # size = read_uint64(self.base_address, offset) 6187 # return bytearray(self._shared_memory.buf[offset + 8:offset + 8 + size]) 6188 6189 # def read_bytearray_2(self, offset: Offset, size: Size) -> bytearray: 6190 # return bytearray(self._shared_memory.buf[offset + 8:offset + 8 + size]) 6191 6192 # def write_bytearray(self, offset: Offset, value: bytearray): 6193 # size = len(value) 6194 # write_uint64(self.base_address, offset, size) 6195 # self._shared_memory.buf[offset + 8:offset + 8 + size] = value 6196 6197 # def read_tuple(self, offset: Offset) -> tuple: 6198 # size = read_uint64(self.base_address, offset) 6199 # return tuple(self._shared_memory.buf[offset + 8:offset + 8 + size]) 6200 6201 # def write_tuple(self, offset: Offset, value: tuple): 6202 # size = len(value) 6203 # write_uint64(self.base_address, offset, size) 6204 # self._shared_memory.buf[offset + 8:offset + 8 + size] = value 6205 6206 # def read_list(self, offset: Offset) -> list: 6207 # size = read_uint64(self.base_address, offset) 6208 # return list(self._shared_memory.buf[offset + 8:offset + 8 + size]) 6209 6210 # def write_list(self, offset: Offset, value: list): 6211 # size = len(value) 6212 # write_uint64(self.base_address, offset, 
size) 6213 # self._shared_memory.buf[offset + 8:offset + 8 + size] = value 6214 6215 # def read_dict(self, offset: Offset) -> dict: 6216 # size = read_uint64(self.base_address, offset) 6217 # return dict(self._shared_memory.buf[offset + 8:offset + 8 + size]) 6218 6219 # def write_dict(self, offset: Offset, value: dict): 6220 # size = len(value) 6221 # write_uint64(self.base_address, offset, size) 6222 # self._shared_memory.buf[offset + 8:offset + 8 + size] = value 6223 6224 # def read_set(self, offset: Offset) -> set: 6225 # size = read_uint64(self.base_address, offset) 6226 # return set(self._shared_memory.buf[offset + 8:offset + 8 + size]) 6227 6228 # def write_set(self, offset: Offset, value: set): 6229 # size = len(value) 6230 # write_uint64(self.base_address, offset, size) 6231 # self._shared_memory.buf[offset + 8:offset + 8 + size] = value 6232 6233 # def read_pickable(self, offset: Offset) -> Any: 6234 # size = read_uint64(self.base_address, offset) 6235 # return pickle.loads(self._shared_memory.buf[offset + 8:offset + 8 + size]) 6236 6237 # def write_pickable(self, offset: Offset, value: Any): 6238 # value_bytes = pickle.dumps(value) 6239 # size = len(value_bytes) 6240 # write_uint64(self.base_address, offset, size) 6241 # self._shared_memory.buf[offset + 8:offset + 8 + size] = value_bytes 6242 6243 # ---------------------------- 6244 6245 def read_obj_type_and_size(self, offset: Offset) -> Tuple[ObjectType, Size]: 6246 obj_type = ObjectType(read_uint64(self.base_address, offset + bs * BaseObjOffsets.obj_type)) 6247 size = read_uint64(self.base_address, offset + bs * BaseObjOffsets.obj_size) 6248 return obj_type, size 6249 6250 def write_obj_type_and_size(self, offset: Offset, obj_type: ObjectType, size: Size): 6251 write_uint64(self.base_address, offset + bs * BaseObjOffsets.obj_type, obj_type.value) 6252 write_uint64(self.base_address, offset + bs * BaseObjOffsets.obj_size, size) 6253 return offset + bs * len(BaseObjOffsets) 6254 6255 # 
    # ----------------------------

    def test_free_memory_blocks(self, offset: Offset, desired_size: Size, data_end_offset: Offset) -> Tuple[bool, Size, Optional[Offset], Optional[Size], Offset]:
        """Check whether a run of consecutive free blocks starting at ``offset`` can satisfy ``desired_size``.

        Walks forward from ``offset`` accumulating adjacent ``tfree_memory`` blocks.

        Returns:
            A 5-tuple ``(found, adjusted_size, split_block_offset, split_block_size, next_block_offset)``:
            ``found`` — True if the run covers ``desired_size``;
            ``adjusted_size`` — ``desired_size``, possibly grown to absorb a
            remainder too small to hold a block header;
            ``split_block_offset``/``split_block_size`` — where a trailing free
            block should be re-written after splitting, or ``None`` if no split;
            ``next_block_offset`` — offset of the first block after the scanned run.

        Raises:
            RuntimeError: a scanned block's size is not a multiple of ``bs`` (corruption).
        """
        # NOTE(review): the declared return type was `Tuple[bool, Size, Offset]`
        # but every return statement yields 5 elements — annotation corrected.
        adjusted_size = desired_size
        initial_offset = offset
        sum_size = 0
        # Last offset at which a block header could still fully fit.
        max_viable_offset = data_end_offset - bs * len(BaseObjOffsets)
        last_found_obj_offset = None
        last_found_obj_size = None
        while True:
            last_found_obj_offset = offset
            try:
                obj_type = ObjectType(read_uint64(self.base_address, offset))
            except ValueError:
                # NOTE(review): after this print, `obj_type` may be unbound on
                # the first iteration and execution continues — confirm intent.
                print(f'Error: {offset=}, {desired_size=}, {sum_size=}')

            size = read_uint64(self.base_address, offset + bs * BaseObjOffsets.obj_size)
            if size % bs:
                # Block sizes are always word-multiples; anything else means corruption.
                print(f'WRONG SIZE {obj_type=} {size=} {offset=} {desired_size=} {data_end_offset=}')
                self.print_mem(offset - bs * 10, bs * 10, 'WRONG SIZE - before')
                self.print_mem(offset, bs * 10, 'WRONG SIZE - after')
                raise RuntimeError(f'WRONG SIZE: {size=}, {offset=}, {obj_type=}')

            # Full footprint of the current block: header + payload.
            last_found_obj_size = bs * len(BaseObjOffsets) + size
            next_block_offset = last_found_obj_offset + last_found_obj_size
            if next_block_offset > data_end_offset:
                # Run extends past the data area: cannot satisfy the request here.
                print(f'{next_block_offset=}, {data_end_offset=}, {len(self._shared_memory.buf)=}')
                return False, adjusted_size, None, None, next_block_offset

            if obj_type is not ObjectType.tfree_memory:
                # Run of free blocks interrupted by an allocated object.
                return False, adjusted_size, None, None, next_block_offset

            sum_size = next_block_offset - initial_offset

            if sum_size == desired_size:
                # Exact fit: no split needed.
                return True, adjusted_size, None, None, next_block_offset

            if sum_size > desired_size:
                # Over-covered: split the last free block at the request boundary.
                new_next_block_offset = initial_offset + desired_size
                new_next_block_size = last_found_obj_size - (new_next_block_offset - last_found_obj_offset)
                if new_next_block_size < bs * len(BaseObjOffsets):
                    # Remainder too small to hold even a header: absorb it into the allocation.
                    adjusted_size = desired_size + new_next_block_size
                    return True, adjusted_size, None, None, next_block_offset
                else:
                    return True, adjusted_size, new_next_block_offset, new_next_block_size, new_next_block_offset

            offset = last_found_obj_offset + last_found_obj_size
            if offset > max_viable_offset:
                # No room left for another header: search failed.
                return False, adjusted_size, None, None, next_block_offset

    def combine_free_memory_blocks(self, free_mem_block_offset: Offset, size: Size, last_free_block_offset: Offset, last_free_block_new_size: Size, next_block_offset: Offset, mark_block: bool = False) -> None:
        """Commit the result of a free-block scan: optionally mark the merged run and rewrite a split tail.

        Args:
            free_mem_block_offset: start of the merged free run.
            size: full footprint of the merged run (header included).
            last_free_block_offset: offset for the split-off trailing free block, or ``None``.
            last_free_block_new_size: full footprint of the split-off block.
            next_block_offset: first block after the run (currently unused here).
            mark_block: when True, write a free-memory header over the merged run.
        """
        # NOTE(review): declared `-> Tuple[Size, Offset]` but the method returns
        # nothing — annotation corrected to None.
        if mark_block:
            self.write_obj_type_and_size(free_mem_block_offset, ObjectType.tfree_memory, size - bs * len(BaseObjOffsets))

        if last_free_block_offset is not None:
            if last_free_block_new_size - bs * len(BaseObjOffsets) < 0:
                # Should never happen: the split logic absorbs sub-header remainders.
                print(f'Error: {last_free_block_new_size=}')

            self.write_obj_type_and_size(last_free_block_offset, ObjectType.tfree_memory, last_free_block_new_size - bs * len(BaseObjOffsets))

        # self.set_free_memory_search_start(next_block_offset)

    # ----------------------------
next_block_offset 6338 6339 if (not found) and loop_allowed: 6340 start_offset = self.get_data_start_offset() 6341 search_end_offset = initial_start_offset - bs * len(BaseObjOffsets) 6342 while (not found) and (start_offset <= search_end_offset): 6343 free_mem_block_offset = start_offset 6344 found, adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset = self.test_free_memory_blocks(start_offset, size, data_end_offset) 6345 start_offset = next_block_offset 6346 6347 if not found: 6348 raise FreeMemoryChunkNotFoundError(obj_type, size, loop_allowed, zero_mem) 6349 6350 self.combine_free_memory_blocks(free_mem_block_offset, adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset) 6351 obj_size = adjusted_size - bs * len(BaseObjOffsets) 6352 self.write_obj_type_and_size(free_mem_block_offset, obj_type, obj_size) 6353 if zero_mem: 6354 # print(f'Zeroing memory 1: {free_mem_block_offset=}, {result_size=}') 6355 # hps_sleep(0.01) 6356 zero_memory(self.base_address, free_mem_block_offset + bs * len(BaseObjOffsets), obj_size) 6357 6358 if free_mem_block_offset % bs: 6359 print(f'Error: {free_mem_block_offset=}, {obj_size=}') 6360 6361 6362 self.set_free_memory_search_start(free_mem_block_offset) 6363 return free_mem_block_offset, obj_size 6364 finally: 6365 self._malloc_time += cpu_clock() - start_time 6366 6367 # def zero_memory(self, offset: Offset, size: Size): 6368 # # print(f'Zeroing memory 1: [{self.base_address + offset}:{self.base_address + offset + size}], {size=}') 6369 # self._shared_memory_bytearray[offset:offset + size] = bytearray(size) 6370 6371 def calloc(self, obj_type: ObjectType, size: Size, num: int, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Optional[Offset], Size]: 6372 return self.malloc(obj_type, size * num, loop_allowed, zero_mem) 6373 6374 def realloc(self, obj_offset: Offset, new_size: int, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Optional[Offset], Size]: 
6375 start_time: float = cpu_clock() 6376 internal_malloc_time: float = 0.0 6377 try: 6378 new_size += bs * len(BaseObjOffsets) 6379 new_size = nearest_size(new_size) 6380 data_end_offset: Offset = self.get_data_end_offset() 6381 result_offset: Offset = None 6382 result_obj_size: Size = 0 6383 original_obj_size = read_uint64(self.base_address, obj_offset + bs * BaseObjOffsets.obj_size) 6384 size = original_obj_size + bs * len(BaseObjOffsets) 6385 next_obj_offset = obj_offset + size 6386 free_mem_block_offset = next_obj_offset 6387 dsize = new_size - size 6388 found, additional_adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset = self.test_free_memory_blocks(free_mem_block_offset, dsize, data_end_offset) 6389 if found: 6390 self.combine_free_memory_blocks(free_mem_block_offset, additional_adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset) 6391 if zero_mem: 6392 # print(f'Zeroing memory 3: {free_mem_block_offset=}, {result_size=}') 6393 # hps_sleep(0.01) 6394 zero_memory(self.base_address, free_mem_block_offset, dsize) 6395 6396 result_obj_size = new_size - bs * len(BaseObjOffsets) 6397 write_uint64(self.base_address, obj_offset + bs * BaseObjOffsets.obj_size, result_obj_size) 6398 self.set_free_memory_search_start(obj_offset) 6399 result_offset = obj_offset 6400 else: 6401 internal_malloc_start_time: float = cpu_clock() 6402 new_offset, result_obj_size = self.malloc(ObjectType(read_uint64(self.base_address, obj_offset + bs * BaseObjOffsets.obj_type)), new_size, loop_allowed) 6403 internal_malloc_time += cpu_clock() - internal_malloc_start_time 6404 if new_offset is None: 6405 return None, 0 6406 6407 self._shared_memory.buf[new_offset + bs * len(BaseObjOffsets):new_offset + bs * len(BaseObjOffsets) + size] = self._shared_memory.buf[obj_offset + bs * len(BaseObjOffsets):obj_offset + bs * len(BaseObjOffsets) + size] 6408 if zero_mem: 6409 # print(f'Zeroing memory 4: {new_offset=}, {new_size=}') 6410 # 
hps_sleep(0.01) 6411 zero_memory(self.base_address, new_offset + bs * len(BaseObjOffsets) + original_obj_size, result_obj_size - original_obj_size) 6412 6413 self.free(obj_offset) 6414 result_offset = new_offset 6415 6416 return result_offset, result_obj_size 6417 finally: 6418 self._realloc_time += cpu_clock() - start_time - internal_malloc_time 6419 6420 def free(self, offset: Offset) -> bool: 6421 write_uint64(self.base_address, offset, ObjectType.tfree_memory.value) 6422 return True 6423 6424 # ---------------------------- 6425 6426 def put_obj(self, obj: Any): 6427 obj_type = self._get_obj_type(obj) 6428 codec = codec_by_type[obj_type] 6429 mapped_obj, offset, size = codec.map_to_shared_memory(self, obj) 6430 return mapped_obj, offset, size 6431 6432 def get_obj(self, offset: int) -> Any: 6433 # print(f'get_obj: {offset=}') 6434 obj_type = ObjectType(read_uint64(self.base_address, offset)) 6435 if obj_type is ObjectType.tfree_memory: 6436 # self.print_mem(offset - 32, 96, 'get_obj [offset - 32: offset + 64]. {}') 6437 raise RuntimeError 6438 6439 codec = codec_by_type[obj_type] 6440 return codec.init_from_shared_memory(self, offset) 6441 6442 def get_obj_buffer(self, offset: int) -> memoryview: 6443 # print(f'get_obj: {offset=}') 6444 obj_type = ObjectType(read_uint64(self.base_address, offset)) 6445 if obj_type is ObjectType.tfree_memory: 6446 # self.print_mem(offset - 32, 96, 'get_obj [offset - 32: offset + 64]. {}') 6447 raise RuntimeError 6448 6449 codec = codec_by_type[obj_type] 6450 return codec.buffer(self, offset) 6451 6452 def get_obj_buffer_2(self, offset: int) -> Tuple[int, int]: 6453 # print(f'get_obj: {offset=}') 6454 obj_type = ObjectType(read_uint64(self.base_address, offset)) 6455 if obj_type is ObjectType.tfree_memory: 6456 # self.print_mem(offset - 32, 96, 'get_obj [offset - 32: offset + 64]. 
{}') 6457 raise RuntimeError 6458 6459 codec = codec_by_type[obj_type] 6460 return codec.buffer_2(self, offset) 6461 6462 def get_obj_mem_view(self, offset: int) -> memoryview: 6463 return self.mem_view(*self.get_obj_buffer_2(offset)) 6464 6465 def destroy_obj(self, offset: int) -> Any: 6466 obj_type = ObjectType(read_uint64(self.base_address, offset)) 6467 codec = codec_by_type[obj_type] 6468 return codec.destroy(self, offset) 6469 6470 # ---------------------------- 6471 6472 def map_object(self, obj: Any) -> Any: 6473 # self.update_free_memory_search_start() 6474 mapped_obj, offset, size = self.put_obj(obj) 6475 # self.commit_free_memory_search_start() 6476 return mapped_obj 6477 6478 def get_object(self, offset: Offset) -> Any: 6479 return self.get_obj(offset) 6480 6481 def destroy_object(self, offset: Offset) -> Any: 6482 return self.destroy_obj(offset) 6483 6484 # ---------------------------- 6485 6486 def write_message(self, obj: Any) -> Tuple[Any, Offset, Offset]: 6487 # self.update_free_memory_search_start() 6488 message_offset, message_real_size = self.malloc(ObjectType.tmessage, bs * len(MessageOffsets)) 6489 try: 6490 mapped_obj, offset, size = self.put_obj(obj) 6491 # self.commit_free_memory_search_start() 6492 last_message_offset: Offset = self.get_last_message_offset() 6493 if last_message_offset: 6494 write_uint64(self.base_address, last_message_offset + bs * len(BaseObjOffsets) + bs * MessageOffsets.next_message_offset, message_offset) 6495 else: 6496 self.set_first_message_offset(message_offset) 6497 6498 write_uint64(self.base_address, message_offset + bs * len(BaseObjOffsets) + bs * MessageOffsets.previous_message_offset, last_message_offset) 6499 write_uint64(self.base_address, message_offset + bs * len(BaseObjOffsets) + bs * MessageOffsets.next_message_offset, 0) 6500 write_uint64(self.base_address, message_offset + bs * len(BaseObjOffsets) + bs * MessageOffsets.item_offset, offset) 6501 self.set_last_message_offset(message_offset) 6502 
except: 6503 self.free(message_offset) 6504 raise 6505 6506 return mapped_obj, offset, message_offset 6507 6508 def put_message(self, obj: Any) -> Any: 6509 mapped_obj, offset, message_offset = self.write_message(obj) 6510 return mapped_obj 6511 6512 def put_message_2(self, obj: Any) -> Tuple[Any, Offset]: 6513 mapped_obj, offset, message_offset = self.write_message(obj) 6514 return mapped_obj, offset 6515 6516 def has_messages(self) -> bool: 6517 return self.get_last_message_offset() != 0 6518 6519 def read_message_info(self, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Optional[Offset], Optional[Offset]]: 6520 # print(0) 6521 if QueueType.fifo == queue_type: 6522 message_offset = self.get_first_message_offset() 6523 # print(f'0.0| {message_offset=}') 6524 if not message_offset: 6525 return None, None, None 6526 6527 next_message_offset = read_uint64(self.base_address, message_offset + bs * len(BaseObjOffsets) + bs * MessageOffsets.next_message_offset) 6528 self.set_first_message_offset(next_message_offset) 6529 if next_message_offset: 6530 write_uint64(self.base_address, next_message_offset + bs * len(BaseObjOffsets) + bs * MessageOffsets.previous_message_offset, 0) 6531 else: 6532 self.set_last_message_offset(0) 6533 else: 6534 message_offset = self.get_last_message_offset() 6535 # print(f'0.1| {message_offset=}') 6536 if not message_offset: 6537 return None, None, None 6538 6539 prev_message_offset = read_uint64(self.base_address, message_offset + bs * len(BaseObjOffsets) + bs * MessageOffsets.previous_message_offset) 6540 self.set_last_message_offset(prev_message_offset) 6541 if prev_message_offset: 6542 write_uint64(self.base_address, prev_message_offset + bs * len(BaseObjOffsets) + bs * MessageOffsets.next_message_offset, 0) 6543 else: 6544 self.set_first_message_offset(0) 6545 6546 # print(1) 6547 obj_offset = read_uint64(self.base_address, message_offset + bs * len(BaseObjOffsets) + bs * MessageOffsets.item_offset) 6548 # print(2) 6549 if not 
obj_offset: 6550 return None, None, message_offset 6551 6552 # print(3) 6553 obj = self.get_obj(obj_offset) 6554 # print(4) 6555 return obj, obj_offset, message_offset 6556 6557 def destroy_message(self, message_offset: Offset): 6558 if not message_offset: 6559 return 6560 6561 # obj_offset = read_uint64(self.base_address, message_offset + bs * len(BaseObjOffsets) + bs * MessageOffsets.item_offset) 6562 # if obj_offset: 6563 # self.destroy_obj(obj_offset) 6564 6565 # self.destroy_obj(message_offset) 6566 6567 self.free(message_offset) 6568 6569 def read_message(self, queue_type: QueueType = QueueType.fifo) -> Any: 6570 obj, obj_offset, message_offset = self.read_message_info(queue_type) 6571 if message_offset: 6572 return obj 6573 else: 6574 raise NoMessagesInQueueError 6575 6576 def read_message_2(self, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Offset]: 6577 obj, obj_offset, message_offset = self.read_message_info(queue_type) 6578 if message_offset: 6579 return obj, obj_offset 6580 else: 6581 raise NoMessagesInQueueError 6582 6583 def take_message(self, queue_type: QueueType = QueueType.fifo) -> Any: 6584 obj, obj_offset, message_offset = self.read_message_info(queue_type) 6585 if message_offset: 6586 self.destroy_message(message_offset) 6587 else: 6588 raise NoMessagesInQueueError 6589 6590 return obj 6591 6592 def take_message_2(self, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Offset]: 6593 obj, obj_offset, message_offset = self.read_message_info(queue_type) 6594 if message_offset: 6595 self.destroy_message(message_offset) 6596 else: 6597 raise NoMessagesInQueueError 6598 6599 return obj, obj_offset 6600 6601 def get_message(self, default = None, queue_type: QueueType = QueueType.fifo) -> Any: 6602 obj, obj_offset, message_offset = self.read_message_info(queue_type) 6603 if message_offset: 6604 return obj 6605 else: 6606 return default 6607 6608 def get_message_2(self, default = None, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, 
Optional[Offset]]: 6609 obj, obj_offset, message_offset = self.read_message_info(queue_type) 6610 if message_offset: 6611 return obj, obj_offset 6612 else: 6613 return default, None 6614 6615 def pop_message(self, default = None, queue_type: QueueType = QueueType.fifo) -> Any: 6616 obj, obj_offset, message_offset = self.read_message_info(queue_type) 6617 if message_offset: 6618 self.destroy_message(message_offset) 6619 else: 6620 obj = default 6621 6622 return obj 6623 6624 def pop_message_2(self, default = None, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Optional[Offset]]: 6625 obj, obj_offset, message_offset = self.read_message_info(queue_type) 6626 if message_offset: 6627 self.destroy_message(message_offset) 6628 else: 6629 obj = default 6630 obj_offset = None 6631 6632 return obj, obj_offset 6633 6634 # ---------------------------- 6635 6636 def get_in_line(self) -> bool: 6637 if self._create: 6638 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_in_charge, 0) 6639 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_wants_to_be_in_charge, 1) 6640 full_memory_barrier() 6641 if self.consumer_in_charge(): 6642 return False 6643 else: 6644 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_in_charge, 1) 6645 full_memory_barrier() 6646 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_wants_to_be_in_charge, 0) 6647 full_memory_barrier() 6648 self.update_free_memory_search_start() 6649 if self.consumer_in_charge(): 6650 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_in_charge, 0) 6651 full_memory_barrier() 6652 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_wants_to_be_in_charge, 1) 6653 full_memory_barrier() 6654 return False 6655 6656 return True 6657 else: 6658 write_uint64(self.base_address, self.sys_values_offset + bs * 
SysValuesOffsets.consumer_in_charge, 0) 6659 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_wants_to_be_in_charge, 1) 6660 full_memory_barrier() 6661 if self.creator_in_charge(): 6662 return False 6663 else: 6664 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_in_charge, 1) 6665 full_memory_barrier() 6666 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_wants_to_be_in_charge, 0) 6667 full_memory_barrier() 6668 self.update_free_memory_search_start() 6669 if self.creator_in_charge(): 6670 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_in_charge, 0) 6671 full_memory_barrier() 6672 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_wants_to_be_in_charge, 1) 6673 full_memory_barrier() 6674 return False 6675 6676 return True 6677 6678 def release(self): 6679 self.commit_free_memory_search_start() 6680 if self._create: 6681 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_in_charge, 0) 6682 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.creator_wants_to_be_in_charge, 0) 6683 full_memory_barrier() 6684 else: 6685 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_in_charge, 0) 6686 write_uint64(self.base_address, self.sys_values_offset + bs * SysValuesOffsets.consumer_wants_to_be_in_charge, 0) 6687 full_memory_barrier() 6688 6689 def wait_my_turn(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool: 6690 start_time = cpu_clock() 6691 while not self.get_in_line(): 6692 if time_limit is not None: 6693 if (cpu_clock() - start_time) > time_limit: 6694 return False 6695 6696 if periodic_sleep_time is None: 6697 mm_pause() 6698 else: 6699 hps_sleep(periodic_sleep_time) 6700 6701 return True 6702 6703 async def 
await_my_turn(self, time_limit: Optional[RationalNumber] = None) -> bool: 6704 start_time = cpu_clock() 6705 while not self.get_in_line(): 6706 if time_limit is not None: 6707 if (cpu_clock() - start_time) > time_limit: 6708 return False 6709 6710 await self._asleep_func() 6711 6712 return True 6713 6714 # ---------------------------- 6715 6716 def wait_for_messages(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool: 6717 start_time = cpu_clock() 6718 has_messages = False 6719 while not has_messages: 6720 if time_limit is not None: 6721 if (cpu_clock() - start_time) > time_limit: 6722 return False 6723 6724 if periodic_sleep_time is None: 6725 mm_pause() 6726 else: 6727 hps_sleep(periodic_sleep_time) 6728 6729 with wait_my_turn(self): 6730 has_messages = self.has_messages() 6731 6732 return True 6733 6734 async def await_for_messages(self, time_limit: Optional[RationalNumber] = None) -> bool: 6735 start_time = cpu_clock() 6736 has_messages = False 6737 while not has_messages: 6738 if time_limit is not None: 6739 if (cpu_clock() - start_time) > time_limit: 6740 return False 6741 6742 await self._asleep_func() 6743 6744 with await_my_turn(self, time_limit): 6745 has_messages = self.has_messages() 6746 6747 return True 6748 6749 # ---------------------------- 6750 6751 @staticmethod 6752 def _get_obj_type(obj: Any) -> ObjectType: 6753 obj_type = type(obj) 6754 if obj is None: 6755 obj_type_atom: ObjectType = ObjectType.tnone 6756 elif obj_type is bool: 6757 obj_type_atom = ObjectType.tbool 6758 elif obj_type is int: 6759 obj_type_atom = ObjectType.tint 6760 elif obj_type is float: 6761 obj_type_atom = ObjectType.tfloat 6762 elif obj_type is complex: 6763 obj_type_atom = ObjectType.tcomplex 6764 elif obj_type is Decimal: 6765 obj_type_atom = ObjectType.tdecimal 6766 elif obj_type is slice: 6767 obj_type_atom = ObjectType.tslice 6768 elif obj_type is str: 6769 obj_type_atom = ObjectType.tstr 6770 elif 
obj_type is bytes: 6771 obj_type_atom = ObjectType.tbytes 6772 elif obj_type is bytearray: 6773 obj_type_atom = ObjectType.tbytearray 6774 elif obj_type is tuple: 6775 obj_type_atom = ObjectType.ttuple 6776 elif obj_type is list: 6777 obj_type_atom = ObjectType.tlist 6778 elif obj_type in {datetime, timedelta, timezone, date, time}: 6779 obj_type_atom = ObjectType.tdatetime 6780 elif issubclass(obj_type, FastLimitedSet): 6781 obj_type_atom = ObjectType.tfastset 6782 elif issubclass(obj_type, AbsMutableSet): 6783 obj_type_atom = ObjectType.tmutableset 6784 elif issubclass(obj_type, AbsSet): 6785 obj_type_atom = ObjectType.tset 6786 elif issubclass(obj_type, FastLimitedDict): 6787 obj_type_atom = ObjectType.tfastdict 6788 elif issubclass(obj_type, ForceMapping): 6789 obj_type_atom = ObjectType.tmapping 6790 elif issubclass(obj_type, AbsMutableMapping): 6791 obj_type_atom = ObjectType.tmutablemapping 6792 elif issubclass(obj_type, AbsMapping): 6793 obj_type_atom = ObjectType.tmapping 6794 elif obj_type is SmallInt: 6795 obj_type_atom = ObjectType.tsmallint 6796 elif obj_type is BigInt: 6797 obj_type_atom = ObjectType.tbigint 6798 elif issubclass(obj_type, Tensor): 6799 obj_type_atom = ObjectType.ttorchtensor 6800 elif issubclass(obj_type, np.ndarray): 6801 obj_type_atom = ObjectType.tnumpyndarray 6802 elif issubclass(obj_type, (ForceGeneralObjectCopy, ForceGeneralObjectInplace)): 6803 obj_type_atom = ObjectType.tgeneralobject 6804 elif issubclass(obj_type, (ForceStaticObjectCopy, ForceStaticObjectInplace)): 6805 obj_type_atom = ObjectType.tstaticobject 6806 elif obj_type in obj_type_map: 6807 obj_type_atom = obj_type_map[obj_type] 6808 # elif hasattr(obj, '__dict__'): 6809 # obj_type_atom = ObjectType.tgeneralobject 6810 # else: 6811 # obj_type_atom = ObjectType.tpickable 6812 elif hasattr(obj, '__slots__') or ((not hasattr(obj, '__slots__')) and (not hasattr(obj, '__dict__'))): 6813 obj_type_atom = ObjectType.tstaticobjectwithslots 6814 else: 6815 # obj_type_atom = 
ObjectType.tgeneralobject 6816 obj_type_atom = ObjectType.tstaticobject 6817 6818 return obj_type_atom 6819 6820 6821# @contextmanager 6822# def get_in_line(shared_memory: SharedMemory): 6823# shared_memory.get_in_line() 6824# try: 6825# yield 6826# finally: 6827# shared_memory.release() 6828 6829 6830class GetInLine: 6831 def __init__(self, shared_memory: SharedMemory): 6832 self.shared_memory: SharedMemory = shared_memory 6833 6834 def __enter__(self): 6835 self.shared_memory.get_in_line() 6836 return 6837 6838 def __exit__(self, exc_type, exc_value, traceback): 6839 self.shared_memory.release() 6840 6841 6842get_in_line = GetInLine 6843 6844 6845# @contextmanager 6846# def wait_my_turn(shared_memory: SharedMemory, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001): 6847# shared_memory.wait_my_turn(time_limit, periodic_sleep_time) 6848# try: 6849# yield 6850# finally: 6851# shared_memory.release() 6852 6853 6854class WaitMyTurn: 6855 def __init__(self, shared_memory: SharedMemory, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001): 6856 self.shared_memory: SharedMemory = shared_memory 6857 self.time_limit: Optional[RationalNumber] = time_limit 6858 self.periodic_sleep_time: Optional[RationalNumber] = periodic_sleep_time 6859 6860 def __enter__(self): 6861 self.shared_memory.wait_my_turn(self.time_limit, self.periodic_sleep_time) 6862 return 6863 6864 def __exit__(self, exc_type, exc_value, traceback): 6865 self.shared_memory.release() 6866 6867 6868wait_my_turn = WaitMyTurn 6869 6870 6871# @contextmanager 6872# def wait_my_turn_when_has_messages(shared_memory: SharedMemory, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001): 6873# while True: 6874# if not shared_memory.wait_my_turn(time_limit, periodic_sleep_time): 6875# raise OperationTimedOutError 6876 6877# try: 6878# if not 
class WaitMyTurnWhenHasMessages:
    """Context manager that holds the turn lock only once messages are queued.

    ``__enter__`` loops: acquire the lock (raising ``OperationTimedOutError``
    when ``wait_my_turn`` times out), keep it if the queue is non-empty,
    otherwise release and try again.
    """

    def __init__(self, shared_memory: 'SharedMemory', time_limit: Optional['RationalNumber'] = None, periodic_sleep_time: Optional['RationalNumber'] = 0.000000001):
        self.shared_memory: 'SharedMemory' = shared_memory
        self.time_limit: Optional['RationalNumber'] = time_limit
        self.periodic_sleep_time: Optional['RationalNumber'] = periodic_sleep_time

    def __enter__(self):
        while True:
            if not self.shared_memory.wait_my_turn(self.time_limit, self.periodic_sleep_time):
                raise OperationTimedOutError

            if self.shared_memory.has_messages():
                return None

            self.shared_memory.release()

    def __exit__(self, exc_type, exc_value, traceback):
        self.shared_memory.release()


# Lower-case callable alias kept for backward compatibility.
wait_my_turn_when_has_messages = WaitMyTurnWhenHasMessages


class await_my_turn:
    """Async context manager counterpart of ``WaitMyTurn`` (use with ``async with``)."""

    def __init__(self, shared_memory: 'SharedMemory', time_limit: Optional['RationalNumber'] = None):
        self.shared_memory: 'SharedMemory' = shared_memory
        self.time_limit: Optional['RationalNumber'] = time_limit

    async def __aenter__(self):
        await self.shared_memory.await_my_turn(self.time_limit)

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        self.shared_memory.release()


def numpy_array_memory_size(np_shape, np_dtype):
    """Return the byte size of a dense numpy array with the given shape and dtype."""
    return np.prod(np_shape) * np.dtype(np_dtype).itemsize


def numpy_array_made_from_pointer_memory_size(np_shape, ctypes_type) -> int:
    """Return the byte size of ``np_shape`` elements of the given ctypes type.

    NOTE(review): ``np.prod`` yields a numpy integer, so despite the ``int``
    annotation the result is a numpy integer — preserved as-is.
    """
    return np.prod(np_shape) * ctypes.sizeof(ctypes_type)


from ctypes import _SimpleCData
def make_numpy_array_from_obj_offset(shared_memory: 'SharedMemory', offset: 'Offset', np_shape, np_dtype_or_ctypes_type = None) -> Any:
    """Build a zero-copy numpy array over the payload of the object at ``offset``.

    Args:
        shared_memory: pool holding the object.
        offset: offset of the object whose buffer backs the array.
        np_shape: requested array shape.
        np_dtype_or_ctypes_type: numpy dtype or ctypes scalar type; defaults to
            ``ctypes.c_uint8``.

    Raises:
        ObjBufferIsSmallerThanRequestedNumpyArrayError: the object's buffer is
            smaller than the requested array (pointer branch only).
    """
    if np_dtype_or_ctypes_type is None:
        np_dtype_or_ctypes_type = ctypes.c_uint8

    data_offset, data_size = shared_memory.get_obj_buffer_2(offset)
    # NOTE(review): ctypes scalar *classes* (e.g. ctypes.c_uint8) are not
    # instances of _SimpleCData, so this isinstance() is False for them and such
    # inputs take the np.ndarray branch below (np.dtype() accepts ctypes types).
    # The pointer branch only fires for _SimpleCData *instances* — confirm
    # whether issubclass() was intended here.
    if isinstance(np_dtype_or_ctypes_type, _SimpleCData):
        num_elements = np.prod(np_shape)
        np_array_size = num_elements * ctypes.sizeof(np_dtype_or_ctypes_type)
        if data_size < np_array_size:
            raise ObjBufferIsSmallerThanRequestedNumpyArrayError(data_size, np_array_size)

        data_address = shared_memory.base_address + data_offset
        void_ptr = ctypes.c_void_p(data_address)
        actual_ptr = ctypes.cast(void_ptr, ctypes.POINTER(np_dtype_or_ctypes_type))
        return np.ctypeslib.as_array(actual_ptr, shape=np_shape)
    else:
        return np.ndarray(np_shape, dtype=np_dtype_or_ctypes_type, buffer=shared_memory.mem_view(data_offset, data_size))


def zero_bytes_from_numpy_array(np: np.ndarray) -> bytes:
    """Return a zero-filled ``bytes`` object as long as the array's byte size.

    NOTE(review): the parameter is named ``np`` and shadows the numpy module
    inside this function; kept as-is for interface compatibility.
    """
    return bytes(np.nbytes)


def bytes_from_numpy_array(np: np.ndarray) -> bytes:
    """Return the array's raw bytes (``ndarray.tobytes``). Parameter name shadows numpy."""
    return np.tobytes()


def dict_to_list(mapping: 'AbsMapping') -> List:
    """Convert an int-keyed mapping to a list, placing each value at its key index.

    Missing indices are filled with ``None``.

    Fixed: the result is now sized ``max(keys) + 1`` — the previous
    ``max(keys)`` sizing raised IndexError when assigning the largest key —
    and an empty mapping yields ``[]`` instead of raising ``ValueError``.
    """
    if not mapping:
        return []

    result = [None] * (max(mapping.keys()) + 1)
    for key, value in mapping.items():
        result[key] = value

    return result


def list_to_dict(data_list: List) -> Dict:
    """Convert a list to a dict keyed by index."""
    return {key: value for key, value in enumerate(data_list)}


def intenum_dict_to_list(mapping: 'AbsMapping', int_enum_class: Optional[Type] = None) -> List:
    """Convert an IntEnum-keyed (or int-keyed) mapping to a dense list.

    The list length comes from ``int_enum_class`` (or the first key's IntEnum
    class) when available, otherwise from the largest key.

    Fixed: the fallback previously assigned the max *key* itself as the list
    length (instead of ``int(max(keys)) + 1``), raising IndexError; key-type
    detection now stops at the first key; an empty mapping yields ``[]``.
    """
    if int_enum_class is not None:
        items_num = len(int_enum_class)
    else:
        if not mapping:
            return []

        items_num = None
        for first_key in mapping.keys():
            if isinstance(first_key, IntEnum):
                items_num = len(type(first_key))
            break

        if items_num is None:
            items_num = int(max(mapping.keys(), key=lambda value: int(value))) + 1

    result = [None] * items_num
    for key, value in mapping.items():
        result[int(key)] = value

    return result


def intenum_list_to_dict(data_list: List, int_enum_class: Optional[Type] = None) -> Dict:
    """Convert a list to a dict keyed by ``int_enum_class`` members (or plain ints)."""
    if int_enum_class:
        return {int_enum_class(key): value for key, value in enumerate(data_list)}
    else:
        return {key: value for key, value in enumerate(data_list)}
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class ObjectType(IntEnum):
    """Type tags stored in every shared-memory object header.

    The numeric values are part of the shared-memory layout shared between
    processes — never renumber existing members; only append new ones.
    """

    # Allocator / queue infrastructure.
    tfree_memory = 0
    tmessage = 1
    # Python value types.
    tnone = 2
    tbool = 3
    tint = 4
    tfloat = 5
    tcomplex = 6
    tstr = 7
    tbytes = 8
    tbytearray = 9
    ttuple = 10
    tlist = 11
    tmutableset = 12
    tset = 13
    tmutablemapping = 14
    tmapping = 15
    tfastdict = 16
    tclass = 17
    tpickable = 18
    tinternal_list = 19
    tsmallint = 20
    tbigint = 21
    tgeneralobject = 22
    tnumpyndarray = 23
    ttorchtensor = 24
    tstaticobject = 25
    tfastset = 26
    tslice = 27
    tdecimal = 28
    tdatetime = 29
    tstaticobjectwithslots = 30
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class SysValuesOffsets(IntEnum):
    """Slot indices of the per-pool system values.

    Each member is multiplied by the base slot size ``bs`` at use sites to get
    the byte offset within the system-values area. The values are part of the
    cross-process layout — never renumber existing members.
    """

    total_mem_size = 0
    data_start_offset = 1
    data_size = 2
    data_end_offset = 3
    free_memory_search_start = 4
    first_message_offset = 5
    last_message_offset = 6
    creator_in_charge = 7
    consumer_in_charge = 8
    creator_wants_to_be_in_charge = 9
    consumer_wants_to_be_in_charge = 10
    creator_ready = 11
    consumer_ready = 12
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
Common base class for all non-exit exceptions.
Inherited Members
- builtins.Exception
- Exception
- builtins.BaseException
- with_traceback
- args
class FreeMemoryChunkNotFoundError(SharedMemoryError):
    """Raised when no contiguous free-memory chunk of the requested size exists.

    In practice this means the pool's ``size`` parameter is too small or the
    pool is fragmented. Estimating consumption down to the byte is impractical:
    every stored entity carries overhead (type metadata, pointers to child
    entities, etc.), so size the pool in generous units — tens (embedded),
    hundreds, or thousands of megabytes — and tune empirically, much like
    provisioning RAM for an externally hosted server: guess (256 MB too little,
    768 MB probably enough) and adjust based on practical testing.

    Also remember fragmentation, which affects every allocation system
    including the OS itself: after deleting every second object the pool may
    have plenty of free space in total, yet only tiny contiguous chunks, so a
    larger object (e.g. a 20-byte string needing contiguous space) still cannot
    be stored. The remedy in both cases is the same — increase the ``size``
    parameter of ``SharedMemory``.
    """
    pass
Indicates that an unpartitioned chunk of free memory of the requested size was not found.
Regarding this error, it’s important to adjust the size parameter in the SharedMemory configuration. Trying to estimate memory consumption down to the byte is not practical because it fails to account for the memory overhead required by each entity stored (such as entity type metadata, pointers to child entities, etc.).
When setting the size parameter for SharedMemory, consider using broader units like tens (for embedded systems), hundreds, or thousands of megabytes, rather than precise byte counts. This approach is similar to how you would not precisely calculate the amount of memory needed for a web server hosted externally; you make an educated guess, like assuming that 256 MB might be insufficient but 768 MB could be adequate, and then adjust based on practical testing.
Also, be aware of memory fragmentation, which affects all memory allocation systems, including the OS itself. For example, if you have a SharedMemory pool sized to store exactly ten 64-bit integers, accounting for additional bytes for system information, your total might be around 200 bytes. Initially, after storing the integers, your memory might appear as ["int", "int", ..., "int"]. If you delete every second integer, the largest contiguous free memory chunk could be just 10 bytes, despite having 50 bytes free in total. This fragmentation means you cannot store a larger data structure like a 20-byte string which needs contiguous space.
To resolve this, simply increase the size parameter value of SharedMemory. This is akin to how you would manage memory allocation for server hosting or thread stack sizes in software development.
Inherits from: SharedMemoryError.
Inherited Members
- builtins.Exception
- Exception
- builtins.BaseException
- with_traceback
- args
Common base class for all non-exit exceptions.
Inherited Members
- builtins.Exception
- Exception
- builtins.BaseException
- with_traceback
- args
Common base class for all non-exit exceptions.
Inherited Members
- builtins.Exception
- Exception
- builtins.BaseException
- with_traceback
- args
Common base class for all non-exit exceptions.
Inherited Members
- builtins.Exception
- Exception
- builtins.BaseException
- with_traceback
- args
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class TBase:
    """Abstract codec interface: maps a Python object to/from a shared-memory region.

    Concrete codecs (TNone, TInt, TBytes, ...) implement these hooks; every
    method here is a stub that raises NotImplementedError.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
        """Serialize ``obj`` into ``shared_memory``; return (mapped obj, offset, real size)."""
        raise NotImplementedError

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
        """Reconstruct the Python object stored at ``offset``."""
        raise NotImplementedError

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        """Release the storage occupied by the object at ``offset``."""
        raise NotImplementedError

    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
        """Return a zero-copy view over the object's raw payload."""
        raise NotImplementedError

    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
        """Return (payload offset, payload size) for the object's raw data."""
        raise NotImplementedError
class TNone:
    """Codec for the ``None`` singleton: only a system header is stored, no payload."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: None) -> Tuple[None, Offset, Size]:
        # None carries no payload, so a zero-sized data area is requested.
        offset, real_size = shared_memory.malloc(ObjectType.tnone, 0)
        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        # Consistency fix: read the type tag at the explicit obj_type field offset,
        # as every other codec (TInt, TBytes, ...) does. Behavior is unchanged
        # provided BaseObjOffsets.obj_type == 0 — TODO confirm against the enum.
        if ObjectType.tnone != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        return None

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        # Same consistency fix as in init_from_shared_memory.
        if ObjectType.tnone != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        shared_memory.free(offset)
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class TInt:
    """Codec for ``int`` values stored as a signed 64-bit payload."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: int) -> Tuple[int, Offset, Size]:
        """Allocate an int slot and write ``obj`` into its data field."""
        offset, real_size = shared_memory.malloc(ObjectType.tint, bs * len(IntOffsets))
        data_address = offset + bs * len(BaseObjOffsets) + bs * IntOffsets.data
        write_int64(shared_memory.base_address, data_address, obj)
        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> int:
        """Read back the signed 64-bit value stored at ``offset``."""
        self._verify_type(shared_memory, offset)
        data_address = offset + bs * len(BaseObjOffsets) + bs * IntOffsets.data
        return read_int64(shared_memory.base_address, data_address)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        """Validate the type tag, then free the slot."""
        self._verify_type(shared_memory, offset)
        shared_memory.free(offset)

    def _verify_type(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        # Guard: the header type tag must identify this codec's type.
        if ObjectType.tint != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError
int([x]) -> integer int(x, base=10) -> integer
Convert a number or string to an integer, or return 0 if no arguments are given. If x is a number, return x.__int__(). For floating point numbers, this truncates towards zero.
If x is not a number or if base is given, then x must be a string, bytes, or bytearray instance representing an integer literal in the given base. The literal can be preceded by '+' or '-' and be surrounded by whitespace. The base defaults to 10. Valid bases are 0 and 2-36. Base 0 means to interpret the base from the string as an integer literal.
>>> int('0b100', base=0)
4
Inherited Members
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class TSmallInt:
    """Codec for small integers, stored in a single signed 64-bit payload slot."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: int) -> Tuple[int, Offset, Size]:
        """Allocate a small-int slot and store ``obj`` in it."""
        offset, real_size = shared_memory.malloc(ObjectType.tsmallint, bs * len(SmallIntOffsets))
        payload_at = offset + bs * (len(BaseObjOffsets) + SmallIntOffsets.data)
        write_int64(shared_memory.base_address, payload_at, obj)
        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> int:
        """Return the stored value after validating the type tag."""
        self._check_tag(shared_memory, offset)
        payload_at = offset + bs * (len(BaseObjOffsets) + SmallIntOffsets.data)
        return read_int64(shared_memory.base_address, payload_at)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        """Validate the type tag, then release the slot."""
        self._check_tag(shared_memory, offset)
        shared_memory.free(offset)

    def _check_tag(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        # Reject reads/frees against a slot holding a different object type.
        if ObjectType.tsmallint != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError
int([x]) -> integer int(x, base=10) -> integer
Convert a number or string to an integer, or return 0 if no arguments are given. If x is a number, return x.__int__(). For floating point numbers, this truncates towards zero.
If x is not a number or if base is given, then x must be a string, bytes, or bytearray instance representing an integer literal in the given base. The literal can be preceded by '+' or '-' and be surrounded by whitespace. The base defaults to 10. Valid bases are 0 and 2-36. Base 0 means to interpret the base from the string as an integer literal.
>>> int('0b100', base=0)
4
Inherited Members
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class TBigInt:
    """Codec for arbitrary-precision integers, serialized via ``bint_to_bytes``."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: int) -> Tuple[int, Offset, Size]:
        """Serialize ``obj`` to bytes and copy them into a freshly allocated slot."""
        encoded = bint_to_bytes(obj)
        encoded_len = len(encoded)
        offset, real_size = shared_memory.malloc(ObjectType.tbigint, bs * len(BigIntOffsets) + encoded_len)
        header_end = offset + bs * len(BaseObjOffsets)
        write_uint64(shared_memory.base_address, header_end + bs * BigIntOffsets.data_size, encoded_len)
        start = header_end + bs * BigIntOffsets.data
        shared_memory._shared_memory.buf[start:start + encoded_len] = encoded
        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> int:
        """Decode and return the stored big integer (0 when the payload is empty)."""
        start, length = self.buffer_2(shared_memory, offset)
        if not length:
            return 0
        raw = bytes(shared_memory._shared_memory.buf[start:start + length])
        return bytes_to_bint(raw)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        """Validate the type tag, then free the slot."""
        self._verify_type(shared_memory, offset)
        shared_memory.free(offset)

    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
        """Return a zero-copy view over the serialized payload."""
        start, length = self.buffer_2(shared_memory, offset)
        return shared_memory._shared_memory.buf[start:start + length]

    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
        """Return (payload offset, payload size) after validating the type tag."""
        self._verify_type(shared_memory, offset)
        header_end = offset + bs * len(BaseObjOffsets)
        length = read_uint64(shared_memory.base_address, header_end + bs * BigIntOffsets.data_size)
        start = header_end + bs * BigIntOffsets.data
        return start, length

    def _verify_type(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        # Reject operations on slots that hold a different object type.
        if ObjectType.tbigint != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError
383 def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview: 384 if ObjectType.tbigint != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 385 raise WrongObjectTypeError 386 387 data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * BigIntOffsets.data_size) 388 data_offset = offset + bs * len(BaseObjOffsets) + bs * BigIntOffsets.data 389 return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
391 def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]: 392 if ObjectType.tbigint != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 393 raise WrongObjectTypeError 394 395 data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * BigIntOffsets.data_size) 396 data_offset = offset + bs * len(BaseObjOffsets) + bs * BigIntOffsets.data 397 return data_offset, data_size
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class TBool:
    """Codec for ``bool`` values, stored as 0/1 in an unsigned 64-bit slot."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bool) -> Tuple[bool, Offset, Size]:
        """Allocate a bool slot and store ``obj`` as an integer flag."""
        offset, real_size = shared_memory.malloc(ObjectType.tbool, bs * len(BoolOffsets))
        flag_at = offset + bs * (len(BaseObjOffsets) + BoolOffsets.data)
        write_uint64(shared_memory.base_address, flag_at, int(obj))
        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bool:
        """Return the stored flag converted back to ``bool``."""
        self._check_tag(shared_memory, offset)
        flag_at = offset + bs * (len(BaseObjOffsets) + BoolOffsets.data)
        return bool(read_uint64(shared_memory.base_address, flag_at))

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        """Validate the type tag, then release the slot."""
        self._check_tag(shared_memory, offset)
        shared_memory.free(offset)

    def _check_tag(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        # Reject reads/frees against a slot holding a different object type.
        if ObjectType.tbool != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class TFloat:
    """Codec for ``float`` values stored as an IEEE-754 double payload."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: float) -> Tuple[float, Offset, Size]:
        offset, real_size = shared_memory.malloc(ObjectType.tfloat, bs * len(FloatOffsets))
        write_double(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * FloatOffsets.data, obj)
        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> float:
        # Consistency fix: read the type tag at the explicit obj_type field offset,
        # as every other codec (TInt, TBytes, ...) does. Behavior is unchanged
        # provided BaseObjOffsets.obj_type == 0 — TODO confirm against the enum.
        if ObjectType.tfloat != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        return read_double(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * FloatOffsets.data)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        # Same consistency fix as in init_from_shared_memory.
        if ObjectType.tfloat != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        shared_memory.free(offset)
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class TBytes:
    """Codec for immutable ``bytes`` payloads of arbitrary length."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bytes) -> Tuple[bytes, Offset, Size]:
        """Allocate a slot sized for ``obj`` and copy the bytes in."""
        length = len(obj)
        offset, real_size = shared_memory.malloc(ObjectType.tbytes, bs * len(BytesOffsets) + length)
        header_end = offset + bs * len(BaseObjOffsets)
        write_uint64(shared_memory.base_address, header_end + bs * BytesOffsets.data_size, length)
        start = header_end + bs * BytesOffsets.data
        shared_memory._shared_memory.buf[start:start + length] = obj
        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bytes:
        """Return a copy of the stored payload (empty bytes when size is zero)."""
        start, length = self.buffer_2(shared_memory, offset)
        if not length:
            return b''
        return bytes(shared_memory._shared_memory.buf[start:start + length])

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        """Validate the type tag, then free the slot."""
        self._verify_type(shared_memory, offset)
        shared_memory.free(offset)

    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
        """Return a zero-copy view over the stored payload."""
        start, length = self.buffer_2(shared_memory, offset)
        return shared_memory._shared_memory.buf[start:start + length]

    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
        """Return (payload offset, payload size) after validating the type tag."""
        self._verify_type(shared_memory, offset)
        header_end = offset + bs * len(BaseObjOffsets)
        length = read_uint64(shared_memory.base_address, header_end + bs * BytesOffsets.data_size)
        start = header_end + bs * BytesOffsets.data
        return start, length

    def _verify_type(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        # Reject operations on slots that hold a different object type.
        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError
491 def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview: 492 if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 493 raise WrongObjectTypeError 494 495 data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * BytesOffsets.data_size) 496 data_offset = offset + bs * len(BaseObjOffsets) + bs * BytesOffsets.data 497 return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
499 def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]: 500 if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 501 raise WrongObjectTypeError 502 503 data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * BytesOffsets.data_size) 504 data_offset = offset + bs * len(BaseObjOffsets) + bs * BytesOffsets.data 505 return data_offset, data_size
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class TBytearray:
    """Codec for ``bytearray`` payloads; the bytes are copied in and out."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bytearray) -> Tuple[bytearray, Offset, Size]:
        """Snapshot ``obj`` as bytes and copy the snapshot into a new slot."""
        snapshot = bytes(obj)
        length = len(snapshot)
        offset, real_size = shared_memory.malloc(ObjectType.tbytearray, bs * len(BytearrayOffsets) + length)
        header_end = offset + bs * len(BaseObjOffsets)
        write_uint64(shared_memory.base_address, header_end + bs * BytearrayOffsets.data_size, length)
        start = header_end + bs * BytearrayOffsets.data
        shared_memory._shared_memory.buf[start:start + length] = snapshot
        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bytearray:
        """Return a fresh ``bytearray`` copy of the stored payload."""
        start, length = self.buffer_2(shared_memory, offset)
        if not length:
            return bytearray()
        raw = bytes(shared_memory._shared_memory.buf[start:start + length])
        return bytearray(raw)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        """Validate the type tag, then free the slot."""
        self._verify_type(shared_memory, offset)
        shared_memory.free(offset)

    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
        """Return a zero-copy view over the stored payload."""
        start, length = self.buffer_2(shared_memory, offset)
        return shared_memory._shared_memory.buf[start:start + length]

    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
        """Return (payload offset, payload size) after validating the type tag."""
        self._verify_type(shared_memory, offset)
        header_end = offset + bs * len(BaseObjOffsets)
        length = read_uint64(shared_memory.base_address, header_end + bs * BytearrayOffsets.data_size)
        start = header_end + bs * BytearrayOffsets.data
        return start, length

    def _verify_type(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        # Reject operations on slots that hold a different object type.
        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError
590 def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview: 591 if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 592 raise WrongObjectTypeError 593 594 data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * BytearrayOffsets.data_size) 595 data_offset = offset + bs * len(BaseObjOffsets) + bs * BytearrayOffsets.data 596 return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
598 def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]: 599 if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 600 raise WrongObjectTypeError 601 602 data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * BytearrayOffsets.data_size) 603 data_offset = offset + bs * len(BaseObjOffsets) + bs * BytearrayOffsets.data 604 return data_offset, data_size
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class TStr:
    """Codec for ``str`` values, stored as their UTF-8 (default codec) encoding."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: str) -> Tuple[str, Offset, Size]:
        """Encode ``obj`` and copy the encoded bytes into a new slot."""
        encoded = obj.encode()
        length = len(encoded)
        offset, real_size = shared_memory.malloc(ObjectType.tstr, bs * len(StrOffsets) + length)
        header_end = offset + bs * len(BaseObjOffsets)
        write_uint64(shared_memory.base_address, header_end + bs * StrOffsets.data_size, length)
        start = header_end + bs * StrOffsets.data
        shared_memory._shared_memory.buf[start:start + length] = encoded
        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> str:
        """Decode and return the stored string (empty string when size is zero)."""
        start, length = self.buffer_2(shared_memory, offset)
        if not length:
            return ''
        raw = bytes(shared_memory._shared_memory.buf[start:start + length])
        return raw.decode()

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        """Validate the type tag, then free the slot."""
        self._verify_type(shared_memory, offset)
        shared_memory.free(offset)

    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
        """Return a zero-copy view over the encoded payload."""
        start, length = self.buffer_2(shared_memory, offset)
        return shared_memory._shared_memory.buf[start:start + length]

    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
        """Return (payload offset, payload size) after validating the type tag."""
        self._verify_type(shared_memory, offset)
        header_end = offset + bs * len(BaseObjOffsets)
        length = read_uint64(shared_memory.base_address, header_end + bs * StrOffsets.data_size)
        start = header_end + bs * StrOffsets.data
        return start, length

    def _verify_type(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        # Reject operations on slots that hold a different object type.
        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError
645 def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview: 646 if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 647 raise WrongObjectTypeError 648 649 data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StrOffsets.data_size) 650 data_offset = offset + bs * len(BaseObjOffsets) + bs * StrOffsets.data 651 return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
653 def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]: 654 if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 655 raise WrongObjectTypeError 656 657 data_size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StrOffsets.data_size) 658 data_offset = offset + bs * len(BaseObjOffsets) + bs * StrOffsets.data 659 return data_offset, data_size
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
def malloc_tinternal_list_true(shared_memory: 'SharedMemory', size: Size, capacity: Size = None) -> Tuple[Offset, Size]:
    """Allocate a raw internal-list block and initialize its capacity/size fields.

    When ``capacity`` is not given it defaults to twice ``size`` (or 16 for an
    empty list). Returns (offset, real allocated size).
    """
    if capacity is None:
        capacity = size << 1 if size else 16

    header_size = 8 * len(InternalListTrueOffsets)
    offset, real_size = shared_memory.malloc(ObjectType.tinternal_list, header_size + 8 * capacity)
    fields_base = offset + header_size
    # Record the bookkeeping fields so readers can recover capacity and length.
    write_uint64(shared_memory.base_address, fields_base + 8 * InternalListTrueOffsets.capacity, capacity)
    write_uint64(shared_memory.base_address, fields_base + 8 * InternalListTrueOffsets.size, size)
    return offset, real_size
def realloc_tinternal_list_true(shared_memory: 'SharedMemory', offset: Offset, desired_size: int = None, new_capacity: int = None, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Offset, Size]:
    """Grow (or resize) an internal-list block and update its capacity field.

    Capacity selection: an explicit ``new_capacity`` wins; otherwise twice
    ``desired_size`` (or twice the current capacity when ``desired_size`` is
    None), with 16 as the floor for empty values. The capacity is never allowed
    to drop below the current element count. Returns (new offset, real size).
    """
    datas_sys_part_size = 8 * len(InternalListTrueOffsets)
    data_offset = offset + datas_sys_part_size
    capacity = read_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.capacity)
    size = read_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.size)
    # Fix: removed a dead pre-assignment of new_list_capacity that was
    # unconditionally overwritten by the branch below.
    if new_capacity is None:
        if desired_size is None:
            new_list_capacity = capacity << 1 if capacity else 16
        else:
            new_list_capacity = desired_size << 1 if desired_size else 16
    else:
        new_list_capacity = new_capacity

    # Never shrink below the current number of stored items.
    if new_list_capacity < size:
        new_list_capacity = size

    new_offset, new_real_size = shared_memory.realloc(offset, datas_sys_part_size + 8 * new_list_capacity, loop_allowed, zero_mem)
    data_offset = new_offset + datas_sys_part_size
    write_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.capacity, new_list_capacity)
    return new_offset, new_real_size
705class IListTrue(BaseIObject, list): 706 def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: List = None) -> None: 707 self._shared_memory = shared_memory 708 self._base_address = shared_memory.base_address 709 if offset is None: 710 offset, real_size = shared_memory.malloc(ObjectType.tlist, 8) 711 self._offset = offset 712 self._offset__data = offset + 8 * len(BaseObjOffsets) 713 self._offset__pointer_to_internal_list = self._offset__data 714 715 if obj is None: 716 obj = list() 717 718 data_len = len(obj) 719 capacity_len = data_len << 1 if data_len else 16 720 internal_list_offset, data_tuple_real_size = malloc_tinternal_list(shared_memory, data_len, capacity_len) 721 self._pointer_to_internal_list = internal_list_offset 722 for i, item in enumerate(obj): 723 item_mapped_obj, item_offset, item_size = shared_memory.put_obj(item) 724 write_uint64(self._base_address, self._item_offset(i), item_offset) 725 else: 726 self._offset = offset 727 self._offset__data = offset + 8 * len(BaseObjOffsets) 728 self._offset__pointer_to_internal_list = self._offset__data 729 730 def raw_to_bytes(self, bytes_num: int) -> bytes: 731 start_index = self._pointer_to_internal_list 732 return self._shared_memory.read_mem(start_index, bytes_num) 733 # return bytes(self._shared_memory._shared_memory.buf[start_index : start_index + bytes_num]) 734 735 @property 736 def _obj_size(self): 737 return read_uint64(self._base_address, self._offset + 8 * BaseObjOffsets.obj_size) 738 739 @property 740 def _pointer_to_internal_list(self): 741 return read_uint64(self._base_address, self._offset__pointer_to_internal_list) 742 743 @_pointer_to_internal_list.setter 744 def _pointer_to_internal_list(self, value: Offset): 745 write_uint64(self._base_address, self._offset__pointer_to_internal_list, value) 746 747 @property 748 def _list_len(self): 749 return read_uint64(self._base_address, self._pointer_to_internal_list + 8 * len(BaseObjOffsets) + 8 * 
InternalListTrueOffsets.size) 750 751 @_list_len.setter 752 def _list_len(self, value: int): 753 write_uint64(self._base_address, self._pointer_to_internal_list + 8 * len(BaseObjOffsets) + 8 * InternalListTrueOffsets.size, value) 754 755 @property 756 def _list_capacity(self): 757 return read_uint64(self._base_address, self._pointer_to_internal_list + 8 * len(BaseObjOffsets) + 8 * InternalListTrueOffsets.capacity) 758 759 def _item_offset(self, key: int) -> Offset: 760 return self._pointer_to_internal_list + 8 * len(BaseObjOffsets) + 8 * len(InternalListTrueOffsets) + key * 8 761 762 def __len__(self) -> int: 763 return self._list_len 764 765 def get_children_offsets(self) -> List[Offset]: 766 return [read_uint64(self._base_address, self._item_offset(i)) for i in range(self._list_len)] 767 768 def __getitem__(self, key: Union[int, slice]) -> Union[Any, List]: 769 if isinstance(key, int): 770 if key < 0: 771 key += len(self) 772 if key < 0 or key >= len(self): 773 raise IndexError 774 775 item_offset = read_uint64(self._base_address, self._item_offset(key)) 776 return self._shared_memory.get_obj(item_offset) 777 elif isinstance(key, slice): 778 if key.step is not None: 779 raise NotImplementedError 780 781 if key.start is None: 782 start = 0 783 elif key.start < 0: 784 start = key.start + len(self) 785 else: 786 start = key.start 787 788 if key.stop is None: 789 stop = len(self) 790 elif key.stop < 0: 791 stop = key.stop + len(self) 792 else: 793 stop = key.stop 794 795 if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop: 796 raise IndexError 797 798 result_list = list() 799 for i in range(start, stop): 800 item_offset = read_uint64(self._base_address, self._item_offset(i)) 801 result_list.append(self._shared_memory.get_obj(item_offset)) 802 return result_list 803 else: 804 raise TypeError 805 806 def __setitem__(self, key: Union[int, slice], value: Union[Any, Sequence]) -> Any: 807 if isinstance(key, int): 808 if key < 0: 809 key += 
len(self) 810 if key < 0 or key >= len(self): 811 raise IndexError 812 813 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value) 814 write_uint64(self._base_address, self._item_offset(key), item_offset) 815 elif isinstance(key, slice): 816 if key.step is not None: 817 raise NotImplementedError 818 819 if key.start is None: 820 start = 0 821 elif key.start < 0: 822 start = key.start + len(self) 823 else: 824 start = key.start 825 826 if key.stop is None: 827 stop = len(self) 828 elif key.stop < 0: 829 stop = key.stop + len(self) 830 else: 831 stop = key.stop 832 833 if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop: 834 raise IndexError 835 836 for i in range(start, stop): 837 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value[i - start]) 838 write_uint64(self._base_address, self._item_offset(i), item_offset) 839 else: 840 raise TypeError 841 842 def __delitem__(self, key: Union[int, slice]) -> None: 843 if isinstance(key, int): 844 if key < 0: 845 key += len(self) 846 if key < 0 or key >= len(self): 847 raise IndexError 848 849 for i in range(key + 1, len(self)): 850 item_offset = read_uint64(self._base_address, self._item_offset(i)) 851 self._shared_memory.free(item_offset) 852 write_uint64(self._base_address, self._item_offset(i - 1), item_offset) 853 854 self._list_len -= 1 855 elif isinstance(key, slice): 856 if key.step is not None: 857 raise NotImplementedError 858 859 if key.start is None: 860 start = 0 861 elif key.start < 0: 862 start = key.start + len(self) 863 else: 864 start = key.start 865 866 if key.stop is None: 867 stop = len(self) 868 elif key.stop < 0: 869 stop = key.stop + len(self) 870 else: 871 stop = key.stop 872 873 if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop: 874 raise IndexError 875 876 for i in range(start, stop): 877 item_offset = read_uint64(self._base_address, self._item_offset(i)) 878 
self._shared_memory.free(item_offset) 879 880 del_items_num = stop - start 881 882 for i in range(stop, len(self)): 883 item_offset = read_uint64(self._base_address, self._item_offset(i)) 884 write_uint64(self._base_address, self._item_offset(i - del_items_num), item_offset) 885 886 self._list_len -= del_items_num 887 else: 888 raise TypeError 889 890 def append(self, item: Any) -> None: 891 if self._list_len > self._list_capacity: 892 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list) 893 894 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item) 895 write_uint64(self._base_address, self._item_offset(self._list_len), item_offset) 896 self._list_len += 1 897 898 def extend(self, items: Sequence) -> None: 899 items_num = len(items) 900 if self._list_len + items_num > self._list_capacity: 901 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num) 902 903 for i, item in enumerate(items): 904 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item) 905 write_uint64(self._base_address, self._item_offset(self._list_len + i), item_offset) 906 907 self._list_len += items_num 908 909 def insert(self, index: int, item: Any) -> None: 910 if index < 0: 911 index += len(self) 912 if index < 0 or index > len(self): 913 raise IndexError 914 915 if self._list_len > self._list_capacity: 916 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list) 917 918 for i in range(self._list_len, index, -1): 919 item_offset = read_uint64(self._base_address, self._item_offset(i - 1)) 920 write_uint64(self._base_address, self._item_offset(i), item_offset) 921 922 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item) 923 write_uint64(self._base_address, self._item_offset(index), item_offset) 924 
self._list_len += 1 925 926 def pop(self, index: int = -1) -> Any: 927 if index < 0: 928 index += len(self) 929 if index < 0 or index >= len(self): 930 raise IndexError 931 932 item_offset = read_uint64(self._base_address, self._item_offset(index)) 933 result = self._shared_memory.get_obj(item_offset) 934 935 for i in range(index + 1, len(self)): 936 item_offset = read_uint64(self._base_address, self._item_offset(i)) 937 write_uint64(self._base_address, self._item_offset(i - 1), item_offset) 938 939 self._list_len -= 1 940 return result 941 942 def remove(self, item: Any) -> None: 943 for i in range(len(self)): 944 item_offset = read_uint64(self._base_address, self._item_offset(i)) 945 if item_offset == item._offset: 946 for j in range(i + 1, len(self)): 947 item_offset = read_uint64(self._base_address, self._item_offset(j)) 948 write_uint64(self._base_address, self._item_offset(j - 1), item_offset) 949 950 self._list_len -= 1 951 return 952 953 raise ValueError 954 955 def clear(self) -> None: 956 for i in range(len(self)): 957 item_offset = read_uint64(self._base_address, self._item_offset(i)) 958 self._shared_memory.free(item_offset) 959 960 self._list_len = 0 961 962 def __iter__(self): 963 return IListIterator(self) 964 965 def __reversed__(self): 966 return IListReversedIterator(self) 967 968 def __contains__(self, item: Any) -> bool: 969 for i in range(len(self)): 970 item_offset = read_uint64(self._base_address, self._item_offset(i)) 971 if item_offset == item._offset: 972 return True 973 974 return False 975 976 def index(self, item: Any, start: int = 0, stop: int = None) -> int: 977 if stop is None: 978 stop = len(self) 979 980 for i in range(start, stop): 981 item_offset = read_uint64(self._base_address, self._item_offset(i)) 982 if item_offset == item._offset: 983 return i 984 985 raise ValueError 986 987 def count(self, item: Any) -> int: 988 result = 0 989 for i in range(len(self)): 990 item_offset = read_uint64(self._base_address, 
self._item_offset(i)) 991 if item_offset == item._offset: 992 result += 1 993 994 return result 995 996 def reverse(self) -> None: 997 for i in range(len(self) // 2): 998 item_offset = read_uint64(self._base_address, self._item_offset(i)) 999 write_uint64(self._base_address, self._item_offset(i), read_uint64(self._base_address, self._item_offset(len(self) - i - 1))) 1000 write_uint64(self._base_address, self._item_offset(len(self) - i - 1), item_offset) 1001 1002 def sort(self, key: Any = None, reverse: bool = False) -> None: 1003 raise NotImplementedError 1004 1005 def copy(self) -> 'IList': 1006 result = IList(self._shared_memory) 1007 result.extend(self) 1008 return result 1009 1010 def __add__(self, other: Sequence) -> 'IList': 1011 result = IList(self._shared_memory) 1012 result.extend(self) 1013 result.extend(other) 1014 return result 1015 1016 def __iadd__(self, other: Sequence) -> 'IList': 1017 self.extend(other) 1018 return self 1019 1020 def __mul__(self, other: int) -> 'IList': 1021 result = IList(self._shared_memory) 1022 for i in range(other): 1023 result.extend(self) 1024 1025 return result 1026 1027 def __imul__(self, other: int) -> 'IList': 1028 my_copy: IList = self.copy() 1029 for i in range(other): 1030 self.extend(my_copy) 1031 1032 return self 1033 1034 def __rmul__(self, other: int) -> 'IList': 1035 return self.__mul__(other) 1036 1037 def __eq__(self, other: Sequence) -> bool: 1038 if len(self) != len(other): 1039 return False 1040 1041 for i in range(len(self)): 1042 if self[i] != other[i]: 1043 return False 1044 1045 return True 1046 1047 def __ne__(self, other: Sequence) -> bool: 1048 return not self.__eq__(other) 1049 1050 def __lt__(self, other: Sequence) -> bool: 1051 for i in range(len(self)): 1052 if self[i] >= other[i]: 1053 return False 1054 1055 return True 1056 1057 def __le__(self, other: Sequence) -> bool: 1058 for i in range(len(self)): 1059 if self[i] > other[i]: 1060 return False 1061 1062 return True 1063 1064 def 
__gt__(self, other: Sequence) -> bool: 1065 for i in range(len(self)): 1066 if self[i] <= other[i]: 1067 return False 1068 1069 return True 1070 1071 def __ge__(self, other: Sequence) -> bool: 1072 for i in range(len(self)): 1073 if self[i] < other[i]: 1074 return False 1075 1076 return True 1077 1078 def __repr__(self) -> str: 1079 return f'IList({list(self)})' 1080 1081 def __str__(self) -> str: 1082 return f'IList({list(self)})' 1083 1084 def __hash__(self) -> int: 1085 return hash(tuple(self)) 1086 1087 def __sizeof__(self) -> int: 1088 return read_uint64(self._base_address, self._offset + 8 * BaseObjOffsets.obj_size) + read_uint64(self._base_address, self._pointer_to_internal_list, 8 * BaseObjOffsets.obj_size) 1089 1090 def export(self) -> list: 1091 return list(self) 1092 1093 # def __del__(self) -> None: 1094 # self._shared_memory.free(self._pointer_to_internal_list) 1095 # self._shared_memory.free(self._offset)
Built-in mutable sequence.
If no argument is given, the constructor creates a new empty list. The argument must be an iterable if specified.
706 def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: List = None) -> None: 707 self._shared_memory = shared_memory 708 self._base_address = shared_memory.base_address 709 if offset is None: 710 offset, real_size = shared_memory.malloc(ObjectType.tlist, 8) 711 self._offset = offset 712 self._offset__data = offset + 8 * len(BaseObjOffsets) 713 self._offset__pointer_to_internal_list = self._offset__data 714 715 if obj is None: 716 obj = list() 717 718 data_len = len(obj) 719 capacity_len = data_len << 1 if data_len else 16 720 internal_list_offset, data_tuple_real_size = malloc_tinternal_list(shared_memory, data_len, capacity_len) 721 self._pointer_to_internal_list = internal_list_offset 722 for i, item in enumerate(obj): 723 item_mapped_obj, item_offset, item_size = shared_memory.put_obj(item) 724 write_uint64(self._base_address, self._item_offset(i), item_offset) 725 else: 726 self._offset = offset 727 self._offset__data = offset + 8 * len(BaseObjOffsets) 728 self._offset__pointer_to_internal_list = self._offset__data
890 def append(self, item: Any) -> None: 891 if self._list_len > self._list_capacity: 892 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list) 893 894 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item) 895 write_uint64(self._base_address, self._item_offset(self._list_len), item_offset) 896 self._list_len += 1
Append object to the end of the list.
898 def extend(self, items: Sequence) -> None: 899 items_num = len(items) 900 if self._list_len + items_num > self._list_capacity: 901 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num) 902 903 for i, item in enumerate(items): 904 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item) 905 write_uint64(self._base_address, self._item_offset(self._list_len + i), item_offset) 906 907 self._list_len += items_num
Extend list by appending elements from the iterable.
909 def insert(self, index: int, item: Any) -> None: 910 if index < 0: 911 index += len(self) 912 if index < 0 or index > len(self): 913 raise IndexError 914 915 if self._list_len > self._list_capacity: 916 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list) 917 918 for i in range(self._list_len, index, -1): 919 item_offset = read_uint64(self._base_address, self._item_offset(i - 1)) 920 write_uint64(self._base_address, self._item_offset(i), item_offset) 921 922 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item) 923 write_uint64(self._base_address, self._item_offset(index), item_offset) 924 self._list_len += 1
Insert object before index.
926 def pop(self, index: int = -1) -> Any: 927 if index < 0: 928 index += len(self) 929 if index < 0 or index >= len(self): 930 raise IndexError 931 932 item_offset = read_uint64(self._base_address, self._item_offset(index)) 933 result = self._shared_memory.get_obj(item_offset) 934 935 for i in range(index + 1, len(self)): 936 item_offset = read_uint64(self._base_address, self._item_offset(i)) 937 write_uint64(self._base_address, self._item_offset(i - 1), item_offset) 938 939 self._list_len -= 1 940 return result
Remove and return item at index (default last).
Raises IndexError if list is empty or index is out of range.
942 def remove(self, item: Any) -> None: 943 for i in range(len(self)): 944 item_offset = read_uint64(self._base_address, self._item_offset(i)) 945 if item_offset == item._offset: 946 for j in range(i + 1, len(self)): 947 item_offset = read_uint64(self._base_address, self._item_offset(j)) 948 write_uint64(self._base_address, self._item_offset(j - 1), item_offset) 949 950 self._list_len -= 1 951 return 952 953 raise ValueError
Remove first occurrence of value.
Raises ValueError if the value is not present.
955 def clear(self) -> None: 956 for i in range(len(self)): 957 item_offset = read_uint64(self._base_address, self._item_offset(i)) 958 self._shared_memory.free(item_offset) 959 960 self._list_len = 0
Remove all items from list.
976 def index(self, item: Any, start: int = 0, stop: int = None) -> int: 977 if stop is None: 978 stop = len(self) 979 980 for i in range(start, stop): 981 item_offset = read_uint64(self._base_address, self._item_offset(i)) 982 if item_offset == item._offset: 983 return i 984 985 raise ValueError
Return first index of value.
Raises ValueError if the value is not present.
987 def count(self, item: Any) -> int: 988 result = 0 989 for i in range(len(self)): 990 item_offset = read_uint64(self._base_address, self._item_offset(i)) 991 if item_offset == item._offset: 992 result += 1 993 994 return result
Return number of occurrences of value.
996 def reverse(self) -> None: 997 for i in range(len(self) // 2): 998 item_offset = read_uint64(self._base_address, self._item_offset(i)) 999 write_uint64(self._base_address, self._item_offset(i), read_uint64(self._base_address, self._item_offset(len(self) - i - 1))) 1000 write_uint64(self._base_address, self._item_offset(len(self) - i - 1), item_offset)
Reverse IN PLACE.
Sort the list in ascending order and return None.
The sort is in-place (i.e. the list itself is modified) and stable (i.e. the order of two equal elements is maintained).
If a key function is given, apply it once to each list item and sort them, ascending or descending, according to their function values.
The reverse flag can be set to sort in descending order.
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class InternalListFieldTypes(IntEnum):
    # Discriminator tags stored per slot of a tinternal_list. They tell the
    # reader whether the slot's 8-byte payload holds an inline value or an
    # offset to a separately allocated shared-memory object (see
    # IList._read_item_value / _write_item_value for the matching dispatch).
    tnone = 0   # empty slot / Python None; payload is unused
    tobj = 1    # payload is an Offset of a shared-memory object
    tint = 2    # payload is an inline signed 64-bit int
    tfloat = 3  # payload is an inline double
    tbool = 4   # payload is an inline 0/1 stored as uint64
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
def malloc_tinternal_list(shared_memory: 'SharedMemory', size: Size, capacity: Size = None) -> Tuple[Offset, Size]:
    """Allocate the backing buffer of an internal list in shared memory.

    Lays out: base-object header, internal-list header (capacity, size),
    then ``capacity`` fixed-width item slots. The whole region is zeroed.

    :param shared_memory: target shared memory arena.
    :param size: initial logical length of the list.
    :param capacity: slot count to reserve; defaults to ``2 * size``
        (or 16 for an empty list). Must be >= ``size``.
    :return: (offset of the allocation, its real allocated size).
    :raises ValueError: if an explicit ``capacity`` is smaller than ``size``.
    """
    if capacity is None:
        capacity = size << 1 if size else 16
    elif size > capacity:
        raise ValueError

    header_bytes = bs * len(BaseObjOffsets)
    sys_bytes = bs * len(InternalListOffsets)
    slot_bytes = bs * len(InternalListFieldOffsets)
    total_bytes = header_bytes + sys_bytes + capacity * slot_bytes

    offset, real_size = shared_memory.malloc(ObjectType.tinternal_list, total_bytes, zero_mem=True)

    base = shared_memory.base_address
    sys_data_offset = offset + header_bytes
    write_uint64(base, sys_data_offset + bs * InternalListOffsets.capacity, capacity)
    write_uint64(base, sys_data_offset + bs * InternalListOffsets.size, size)
    return offset, real_size
def realloc_tinternal_list(shared_memory: 'SharedMemory', offset: Offset, desired_size: int = None, new_capacity: int = None, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Offset, Size]:
    """Resize the backing buffer of an internal list.

    :param shared_memory: target shared memory arena.
    :param offset: current offset of the tinternal_list allocation.
    :param desired_size: logical length the caller wants to fit; used to pick
        a new capacity when ``new_capacity`` is not given.
    :param new_capacity: explicit slot count to reserve (wins over doubling).
    :param loop_allowed: forwarded to ``shared_memory.realloc``.
    :param zero_mem: forwarded to ``shared_memory.realloc``.
    :return: (possibly new offset, real allocated size).
    :raises ValueError: if both sizes are given and ``desired_size`` exceeds
        ``new_capacity``.
    """
    if (desired_size is not None) and (new_capacity is not None) and (desired_size > new_capacity):
        raise ValueError

    sys_data_offset = offset + bs * len(BaseObjOffsets)
    capacity = read_uint64(shared_memory.base_address, sys_data_offset + bs * InternalListOffsets.capacity)
    size = read_uint64(shared_memory.base_address, sys_data_offset + bs * InternalListOffsets.size)

    # Pick the new capacity: an explicit request wins; otherwise double the
    # relevant baseline (current capacity, or the desired size), with 16 as
    # the floor for empty lists.
    # BUGFIX(cleanup): removed a dead pre-assignment of new_list_capacity that
    # was unconditionally overwritten by this if/else.
    if new_capacity is None:
        if desired_size is None:
            new_list_capacity = capacity << 1 if capacity else 16
        else:
            new_list_capacity = desired_size << 1 if desired_size else 16
    else:
        new_list_capacity = new_capacity

    # Never shrink below the currently stored element count.
    if new_list_capacity < size:
        new_list_capacity = size

    if new_list_capacity == capacity:
        # Nothing to do: report the existing allocation.
        real_size = read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_size)
        return offset, real_size

    # NOTE(review): unlike malloc_tinternal_list, the requested size omits
    # bs * len(BaseObjOffsets); presumably shared_memory.realloc accounts for
    # the base object header itself — confirm against SharedMemory.realloc.
    new_offset, new_real_size = shared_memory.realloc(
        offset,
        bs * len(InternalListOffsets) + new_list_capacity * bs * len(InternalListFieldOffsets),
        loop_allowed,
        zero_mem
    )
    new_sys_data_offset = new_offset + bs * len(BaseObjOffsets)
    write_uint64(shared_memory.base_address, new_sys_data_offset + bs * InternalListOffsets.capacity, new_list_capacity)
    return new_offset, new_real_size
def uint64_to_bytes(int_data: int) -> bytes:
    """
    Serialize a 64 bit unsigned int to little-endian bytes.

    :param int_data: unsigned integer in range [0, 2**64)
    :return: bytes(); len == 8
    """
    from struct import pack
    # BUGFIX: was pack('<B', ...), which emits a single byte and raises for
    # values > 255 — contradicting both the function name and the documented
    # 8-byte result. '<Q' is the little-endian unsigned 64-bit format code
    # (it was evidently swapped with uint8_to_bytes).
    return pack('<Q', int_data)
For a 64 bit unsigned int in little endian :param int_data: :return: bytes(); len == 8
def uint8_to_bytes(int_data: int) -> bytes:
    """
    Serialize an 8 bit unsigned int to a single byte.

    :param int_data: unsigned integer in range [0, 256)
    :return: bytes(); len == 1
    """
    from struct import pack
    # BUGFIX: was pack('<Q', ...), which emits 8 bytes — the '<B'/'<Q' format
    # codes were swapped with uint64_to_bytes, and the docstring was a
    # copy-paste of the 64-bit text. '<B' packs one unsigned byte.
    return pack('<B', int_data)
For an 8 bit unsigned int :param int_data: :return: bytes(); len == 1
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
1200class IList(BaseIObject, list): 1201 __slots__ = ('_shared_memory', '_base_address', '_offset', '_offset__data', '_offset__pointer_to_internal_list') 1202 1203 def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: List = None) -> None: 1204 self._shared_memory = shared_memory 1205 self._base_address = shared_memory.base_address 1206 if offset is None: 1207 offset, real_size = shared_memory.malloc(ObjectType.tlist, bs * len(ListOffsets)) 1208 try: 1209 self._offset = offset 1210 self._offset__data = offset + bs * len(BaseObjOffsets) 1211 self._offset__pointer_to_internal_list = self._offset__data + bs * ListOffsets.internal_list_offset 1212 1213 if obj is None: 1214 obj = list() 1215 1216 data_len = len(obj) 1217 internal_list_offset, data_tuple_real_size = malloc_tinternal_list(shared_memory, data_len) 1218 self._pointer_to_internal_list = internal_list_offset 1219 for i, item in enumerate(obj): 1220 # print(self.get_children_offsets()) 1221 # # print(self.raw_to_list(slice(0, None))) 1222 # print(self.raw_to_bytes(200)) 1223 self._write_item(i, item) 1224 # print(self.get_children_offsets()) 1225 # # print(self.raw_to_list(slice(0, None))) 1226 # print(self.raw_to_bytes(200)) 1227 1228 # print(self.get_children_offsets()) 1229 # # print(self.raw_to_list(slice(0, None))) 1230 # print(self.raw_to_bytes(200)) 1231 # print('=======================') 1232 except: 1233 self._free_mem() 1234 raise 1235 else: 1236 self._offset = offset 1237 self._offset__data = offset + bs * len(BaseObjOffsets) 1238 self._offset__pointer_to_internal_list = self._offset__data + bs * ListOffsets.internal_list_offset 1239 1240 def raw_to_list(self, key) -> List[bytes]: 1241 if isinstance(key, int): 1242 if key < 0: 1243 key += len(self) 1244 if key < 0 or key >= len(self): 1245 raise IndexError 1246 1247 item_offset = self._read_item_offset_or_data(key) 1248 return [uint64_to_bytes(item_offset)] 1249 elif isinstance(key, slice): 1250 if key.step is not None: 1251 
raise NotImplementedError 1252 1253 if key.start is None: 1254 start = 0 1255 elif key.start < 0: 1256 start = key.start + len(self) 1257 else: 1258 start = key.start 1259 1260 if key.stop is None: 1261 stop = len(self) 1262 elif key.stop < 0: 1263 stop = key.stop + len(self) 1264 else: 1265 stop = key.stop 1266 1267 if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop: 1268 raise IndexError 1269 1270 result_list = list() 1271 for i in range(start, stop): 1272 item_offset = self._read_item_offset_or_data(i) 1273 result_list.append(uint64_to_bytes(item_offset)) 1274 1275 return result_list 1276 1277 def raw_to_bytes(self, bytes_num: int) -> bytes: 1278 start_index = self._pointer_to_internal_list 1279 return self._shared_memory.read_mem(start_index, bytes_num) 1280 # return bytes(self._shared_memory._shared_memory.buf[start_index : start_index + bytes_num]) 1281 1282 @property 1283 def _obj_size(self): 1284 return read_uint64(self._base_address, self._offset + bs * BaseObjOffsets.obj_size) 1285 1286 @property 1287 def _pointer_to_internal_list(self): 1288 return read_uint64(self._base_address, self._offset__pointer_to_internal_list) 1289 1290 @_pointer_to_internal_list.setter 1291 def _pointer_to_internal_list(self, value: Offset): 1292 write_uint64(self._base_address, self._offset__pointer_to_internal_list, value) 1293 1294 @property 1295 def _list_len(self): 1296 return read_uint64(self._base_address, self._pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * InternalListOffsets.size) 1297 1298 @_list_len.setter 1299 def _list_len(self, value: int): 1300 write_uint64(self._base_address, self._pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * InternalListOffsets.size, value) 1301 1302 @property 1303 def _list_capacity(self): 1304 return read_uint64(self._base_address, self._pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * InternalListOffsets.capacity) 1305 1306 def _item_offset(self, key: int) -> Offset: 1307 
return self._pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + key * bs * len(InternalListFieldOffsets) 1308 1309 def _item_type_offset(self, key: int) -> Offset: 1310 # from os import getpid 1311 result = self._pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + key * bs * len(InternalListFieldOffsets) + bs * InternalListFieldOffsets.field_type 1312 # add_0 = bs * len(BaseObjOffsets) 1313 # add_1 = bs * len(InternalListOffsets) 1314 # add_2 = key * bs * len(InternalListFieldOffsets) 1315 # add_3 = bs * InternalListFieldOffsets.field_type 1316 # print(f'PID: {getpid()}. [{add_0},{add_1},{add_2},{add_3}],{add_0 + add_1 + add_2 + add_3},{self._pointer_to_internal_list}: item_type_offset: {key}:{result}') 1317 return result 1318 1319 def _item_value_offset(self, key: int) -> Offset: 1320 # from os import getpid 1321 result = self._pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + key * bs * len(InternalListFieldOffsets) + bs * InternalListFieldOffsets.offset_or_data 1322 # print(f'PID: {getpid()}. 
{bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + key * bs * len(InternalListFieldOffsets) + bs * InternalListFieldOffsets.offset_or_data},{self._pointer_to_internal_list}: item_value_offset: {key}:{result}') 1323 return result 1324 1325 def _read_item_type(self, key: int) -> int: 1326 return read_uint64(self._base_address, self._item_type_offset(key)) 1327 1328 def _write_item_type(self, key: int, item_type: int) -> None: 1329 write_uint64(self._base_address, self._item_type_offset(key), item_type) 1330 1331 def _read_item_offset_or_data(self, key: int) -> Union[Offset, int]: 1332 return read_uint64(self._base_address, self._item_value_offset(key)) 1333 1334 def _write_item_offset_or_data(self, key: int, offset_or_data: Union[Offset, int]) -> None: 1335 write_uint64(self._base_address, self._item_value_offset(key), offset_or_data) 1336 1337 # def _determine_obj_type(self, obj: Any) -> int: 1338 # if isinstance(obj, int): 1339 # return 1 1340 # elif isinstance(obj, float): 1341 # return 2 1342 # elif isinstance(obj, bool): 1343 # return 3 1344 # else: 1345 # return 0 1346 1347 def _determine_obj_type(self, obj: Any) -> int: 1348 if type(obj) is int: 1349 return InternalListFieldTypes.tint.value 1350 elif type(obj) is float: 1351 return InternalListFieldTypes.tfloat.value 1352 elif type(obj) is bool: 1353 return InternalListFieldTypes.tbool.value 1354 elif obj is None: 1355 return InternalListFieldTypes.tnone.value 1356 else: 1357 return InternalListFieldTypes.tobj.value 1358 1359 def _determine_obj_offset(self, obj: Any) -> Optional[Offset]: 1360 if isinstance(obj, BaseIObject): 1361 return obj._offset 1362 else: 1363 return None 1364 1365 def _compare_item_to_obj_fast(self, key: int, obj: Any, obj_type: int, obj_offset) -> bool: 1366 result: bool = False 1367 item_type = self._read_item_type(key) 1368 if item_type == obj_type: 1369 if item_type == InternalListFieldTypes.tobj.value: 1370 if obj_offset is None: 1371 if self._read_item_value(key, item_type) 
== obj: 1372 result = True 1373 else: 1374 if self._read_item_offset_or_data(key) == obj_offset: 1375 result = True 1376 elif item_type == InternalListFieldTypes.tint.value: 1377 if self._read_item_offset_or_data(key) == obj: 1378 result = True 1379 elif item_type == InternalListFieldTypes.tfloat.value: 1380 if self._read_item_offset_or_data(key) == obj: 1381 result = True 1382 elif item_type == InternalListFieldTypes.tbool.value: 1383 if self._read_item_offset_or_data(key) == obj: 1384 result = True 1385 elif item_type == InternalListFieldTypes.tnone.value: 1386 result = obj is None 1387 else: 1388 raise ValueError 1389 1390 return result 1391 1392 def _compare_item_to_obj(self, key: int, obj: Any) -> bool: 1393 obj_type = self._determine_obj_type(obj) 1394 obj_offset = self._determine_obj_offset(obj) 1395 return self._compare_item_to_obj_fast(key, obj, obj_type, obj_offset) 1396 1397 def _read_item_value(self, key: int, item_type: int) -> Any: 1398 if item_type == InternalListFieldTypes.tobj.value: 1399 item_offset = read_uint64(self._base_address, self._item_value_offset(key)) 1400 return self._shared_memory.get_obj(item_offset) 1401 elif item_type == InternalListFieldTypes.tint.value: 1402 return read_int64(self._base_address, self._item_value_offset(key)) 1403 elif item_type == InternalListFieldTypes.tfloat.value: 1404 return read_double(self._base_address, self._item_value_offset(key)) 1405 elif item_type == InternalListFieldTypes.tbool.value: 1406 return bool(read_uint64(self._base_address, self._item_value_offset(key))) 1407 elif item_type == InternalListFieldTypes.tnone.value: 1408 return None 1409 else: 1410 raise ValueError 1411 1412 def _write_item_value(self, key: int, item_type: int, value: Any) -> None: 1413 if item_type == InternalListFieldTypes.tobj.value: 1414 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value) 1415 write_uint64(self._base_address, self._item_value_offset(key), item_offset) 1416 elif item_type == 
InternalListFieldTypes.tint.value: 1417 write_int64(self._base_address, self._item_value_offset(key), value) 1418 elif item_type == InternalListFieldTypes.tfloat.value: 1419 write_double(self._base_address, self._item_value_offset(key), value) 1420 elif item_type == InternalListFieldTypes.tbool.value: 1421 write_uint64(self._base_address, self._item_value_offset(key), int(value)) 1422 elif item_type == InternalListFieldTypes.tnone.value: 1423 pass 1424 else: 1425 raise ValueError 1426 1427 def _free_item_value(self, key: int, item_type: int) -> None: 1428 if item_type == InternalListFieldTypes.tobj.value: 1429 item_offset = read_uint64(self._base_address, self._item_value_offset(key)) 1430 # self._shared_memory.free(item_offset) 1431 self._shared_memory.destroy_obj(item_offset) 1432 elif item_type == InternalListFieldTypes.tint.value: 1433 pass 1434 elif item_type == InternalListFieldTypes.tfloat.value: 1435 pass 1436 elif item_type == InternalListFieldTypes.tbool.value: 1437 pass 1438 elif item_type == InternalListFieldTypes.tnone.value: 1439 pass 1440 else: 1441 raise ValueError 1442 1443 self._write_item_type(key, InternalListFieldTypes.tnone.value) 1444 1445 def _read_item_type_and_value(self, key: int) -> Tuple[int, Any]: 1446 item_type = self._read_item_type(key) 1447 return item_type, self._read_item_value(key, item_type) 1448 1449 def _write_item_value_and_get_type(self, key: int, value: Any) -> int: 1450 if isinstance(value, int): 1451 write_uint64(self._base_address, self._item_value_offset(key), value) 1452 return InternalListFieldTypes.tint.value 1453 elif isinstance(value, float): 1454 write_double(self._base_address, self._item_value_offset(key), value) 1455 return InternalListFieldTypes.tfloat.value 1456 elif isinstance(value, bool): 1457 write_uint64(self._base_address, self._item_value_offset(key), int(value)) 1458 return InternalListFieldTypes.tbool.value 1459 elif value is None: 1460 return InternalListFieldTypes.tnone.value 1461 else: 1462 
item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value) 1463 write_uint64(self._base_address, self._item_value_offset(key), item_offset) 1464 return InternalListFieldTypes.tobj.value 1465 1466 def _free_item_value_and_get_type(self, key: int) -> int: 1467 item_type = self._read_item_type(key) 1468 self._free_item_value(key, item_type) 1469 return item_type 1470 1471 def _read_item(self, key: int) -> Any: 1472 item_type = self._read_item_type(key) 1473 return self._read_item_value(key, item_type) 1474 1475 def _write_item(self, key: int, value: Any) -> None: 1476 item_type = self._write_item_value_and_get_type(key, value) 1477 self._write_item_type(key, item_type) 1478 1479 def _free_item(self, key: int) -> None: 1480 item_type = self._read_item_type(key) 1481 self._free_item_value(key, item_type) 1482 1483 def _copy_item(self, src_key: int, dst_key: int) -> None: 1484 self._write_item_type(dst_key, self._read_item_type(src_key)) 1485 self._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key)) 1486 1487 def copy_item(self, src_key: int, dst_key: int) -> None: 1488 return self._copy_item(src_key, dst_key) 1489 1490 def _move_item(self, src_key: int, dst_key: int) -> None: 1491 self._write_item_type(dst_key, self._read_item_type(src_key)) 1492 self._write_item_type(src_key, InternalListFieldTypes.tnone.value) 1493 self._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key)) 1494 1495 def move_item(self, src_key: int, dst_key: int) -> None: 1496 return self._move_item(src_key, dst_key) 1497 1498 def copy_item_to_list(self, src_key: int, other: 'IList', dst_key: int) -> None: 1499 other._write_item_type(dst_key, self._read_item_type(src_key)) 1500 other._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key)) 1501 1502 def move_item_to_list(self, src_key: int, other: 'IList', dst_key: int) -> None: 1503 other._write_item_type(dst_key, self._read_item_type(src_key)) 1504 
self._write_item_type(src_key, InternalListFieldTypes.tnone.value) 1505 other._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key)) 1506 1507 def _swap_items(self, key1: int, key2: int) -> None: 1508 item_type1 = self._read_item_type(key1) 1509 item_offset_or_data1 = self._read_item_offset_or_data(key1) 1510 self._write_item_type(key1, self._read_item_type(key2)) 1511 self._write_item_type(key2, item_type1) 1512 self._write_item_offset_or_data(key1, self._read_item_offset_or_data(key2)) 1513 self._write_item_offset_or_data(key2, item_offset_or_data1) 1514 1515 def swap_items(self, key1: int, key2: int) -> None: 1516 return self._swap_items(key1, key2) 1517 1518 def __len__(self) -> int: 1519 return self._list_len 1520 1521 def get_children_data_or_offsets(self) -> List[Offset]: 1522 return [self._read_item_offset_or_data(i) for i in range(self._list_len)] 1523 1524 def get_children_offsets(self): 1525 return self.get_children_data_or_offsets() 1526 1527 def _getitem_as_offset(self, key: int) -> Tuple[int, Offset]: 1528 return list__get_item_as_offset(key, self._base_address, self._offset__pointer_to_internal_list) 1529 1530 def __getitem__(self, key: Union[int, slice]) -> Union[Any, List]: 1531 if isinstance(key, int): 1532 base_address = self._base_address 1533 offset__pointer_to_internal_list = self._offset__pointer_to_internal_list 1534 pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list) 1535 self_len = read_uint64(base_address, pointer_to_internal_list + 24) 1536 if key < 0 or key >= self_len: 1537 raise IndexError 1538 1539 return list__get_item(key, self._base_address, self._offset__pointer_to_internal_list, self._shared_memory.get_obj) 1540 1541 # base_address = self._base_address 1542 # offset__pointer_to_internal_list = self._offset__pointer_to_internal_list 1543 # pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list) 1544 # self_len = read_uint64(base_address, 
pointer_to_internal_list + 24) 1545 1546 # if key < 0: 1547 # key += self_len 1548 1549 # if key < 0 or key >= self_len: 1550 # raise IndexError 1551 1552 # item_type_offset = pointer_to_internal_list + 32 + key * 16 1553 # item_value_offset = pointer_to_internal_list + 40 + key * 16 1554 # item_type = read_uint64(base_address, item_type_offset) 1555 # if item_type == 1: 1556 # return read_int64(base_address, item_value_offset) 1557 # elif item_type == 2: 1558 # return read_double(base_address, item_value_offset) 1559 # elif item_type == 3: 1560 # return bool(read_uint64(base_address, item_value_offset)) 1561 # elif item_type == 0: 1562 # item_offset = read_uint64(base_address, item_value_offset) 1563 # return self._shared_memory.get_obj(item_offset) 1564 # else: 1565 # raise ValueError 1566 1567 # # return self._read_item(key) 1568 elif isinstance(key, slice): 1569 if key.step is not None: 1570 raise NotImplementedError 1571 1572 if key.start is None: 1573 start = 0 1574 elif key.start < 0: 1575 start = key.start + len(self) 1576 else: 1577 start = key.start 1578 1579 if key.stop is None: 1580 stop = len(self) 1581 elif key.stop < 0: 1582 stop = key.stop + len(self) 1583 else: 1584 stop = key.stop 1585 1586 if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop: 1587 raise IndexError 1588 1589 result_list = list() 1590 # performance improvement instead of using self._read_item(i) 1591 base_address = self._base_address 1592 offset__pointer_to_internal_list = self._offset__pointer_to_internal_list 1593 pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list) 1594 1595 # item_type_offset = pointer_to_internal_list + 32 + i * 16 1596 item_type_offset = pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + i * bs * len(InternalListFieldOffsets) + bs * InternalListFieldOffsets.field_type 1597 1598 # item_value_offset = pointer_to_internal_list + 40 + i * 16 1599 item_value_offset 
= pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + i * bs * len(InternalListFieldOffsets) + bs * InternalListFieldOffsets.offset_or_data 1600 1601 for i in range(start, stop): 1602 # result_list.append(self._read_item(i)) 1603 1604 # performance improvement instead of using self._read_item(i) 1605 item_type = read_uint64(base_address, item_type_offset) 1606 if item_type == InternalListFieldTypes.tint.value: 1607 result_list.append(read_int64(base_address, item_value_offset)) 1608 elif item_type == InternalListFieldTypes.tfloat.value: 1609 result_list.append(read_double(base_address, item_value_offset)) 1610 elif item_type == InternalListFieldTypes.tbool.value: 1611 result_list.append(bool(read_uint64(base_address, item_value_offset))) 1612 elif item_type == InternalListFieldTypes.tnone.value: 1613 result_list.append(None) 1614 elif item_type == InternalListFieldTypes.tobj.value: 1615 item_offset = read_uint64(base_address, item_value_offset) 1616 result_list.append(self._shared_memory.get_obj(item_offset)) 1617 else: 1618 raise ValueError 1619 1620 return result_list 1621 else: 1622 raise TypeError 1623 1624 def _setitem_as_offset(self, key: int, value_type_and_offset: Tuple[int, Offset], need_to_free_item: bool = True) -> Any: 1625 value_item_type, value_item_offset = value_type_and_offset 1626 list__set_item_as_offset(key, value_item_type, value_item_offset, self._base_address, self._offset__pointer_to_internal_list, need_to_free_item, self._shared_memory.destroy_obj) 1627 1628 def __setitem__(self, key: Union[int, slice], value: Union[Any, Sequence], need_to_free_item: bool = True) -> Any: 1629 if isinstance(key, int): 1630 # print(f'{key=}, {value=}, {need_to_free_item=}') 1631 # internal_list_data_offset = self._pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + key * bs * len(InternalListFieldOffsets) + bs * InternalListFieldOffsets.field_type 1632 # internal_list_data_size = 
self._list_len * bs * len(InternalListFieldOffsets) 1633 # self._shared_memory.print_mem(internal_list_data_offset, internal_list_data_size, 'internal_list before list__set_item') 1634 1635 base_address = self._base_address 1636 offset__pointer_to_internal_list = self._offset__pointer_to_internal_list 1637 pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list) 1638 self_len = read_uint64(base_address, pointer_to_internal_list + 24) 1639 if key < 0 or key >= self_len: 1640 raise IndexError 1641 1642 list__set_item(key, value, self._base_address, self._offset__pointer_to_internal_list, need_to_free_item, self._shared_memory.destroy_obj, self._shared_memory.put_obj) 1643 1644 # base_address = self._base_address 1645 # offset__pointer_to_internal_list = self._offset__pointer_to_internal_list 1646 # pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list) 1647 # self_len = read_uint64(base_address, pointer_to_internal_list + 24) 1648 1649 # if key < 0: 1650 # key += self_len 1651 1652 # if key < 0 or key >= self_len: 1653 # raise IndexError 1654 1655 # item_type_offset = pointer_to_internal_list + 32 + key * 16 1656 # item_value_offset = pointer_to_internal_list + 40 + key * 16 1657 # if isinstance(value, int): 1658 # write_int64(base_address, item_value_offset, value) 1659 # item_type = 1 1660 # elif isinstance(value, float): 1661 # write_double(base_address, item_value_offset, value) 1662 # item_type = 2 1663 # elif isinstance(value, bool): 1664 # write_uint64(base_address, item_value_offset, int(value)) 1665 # item_type = 3 1666 # else: 1667 # item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value) 1668 # write_uint64(base_address, item_value_offset, item_offset) 1669 # item_type = 0 1670 1671 # write_uint64(base_address, item_type_offset, item_type) 1672 1673 # # self._write_item(key, value) 1674 elif isinstance(key, slice): 1675 if key.step is not None: 1676 raise NotImplementedError 
1677 1678 if key.start is None: 1679 start = 0 1680 elif key.start < 0: 1681 start = key.start + len(self) 1682 else: 1683 start = key.start 1684 1685 if key.stop is None: 1686 stop = len(self) 1687 elif key.stop < 0: 1688 stop = key.stop + len(self) 1689 else: 1690 stop = key.stop 1691 1692 if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop: 1693 raise IndexError 1694 1695 if need_to_free_item: 1696 for i in range(start, stop): 1697 self._free_item(i) 1698 1699 # performance improvement instead of using self._write_item(i, item) 1700 base_address = self._base_address 1701 offset__pointer_to_internal_list = self._offset__pointer_to_internal_list 1702 pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list) 1703 1704 # item_type_offset = pointer_to_internal_list + 32 + i * 16 1705 item_type_offset = pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + i * bs * len(InternalListFieldOffsets) + bs * InternalListFieldOffsets.field_type 1706 1707 # item_value_offset = pointer_to_internal_list + 40 + i * 16 1708 item_value_offset = pointer_to_internal_list + bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + i * bs * len(InternalListFieldOffsets) + bs * InternalListFieldOffsets.offset_or_data 1709 1710 for i in range(start, stop): 1711 item = value[i - start] 1712 # self._write_item(i, item) 1713 1714 # performance improvement instead of using self._write_item(i, item) 1715 if isinstance(item, int): 1716 write_int64(base_address, item_value_offset, item) 1717 item_type = InternalListFieldTypes.tint.value 1718 elif isinstance(item, float): 1719 write_double(base_address, item_value_offset, item) 1720 item_type = InternalListFieldTypes.tfloat.value 1721 elif isinstance(item, bool): 1722 write_uint64(base_address, item_value_offset, int(item)) 1723 item_type = InternalListFieldTypes.tbool.value 1724 elif item is None: 1725 item_type = InternalListFieldTypes.tnone.value 
1726 else: 1727 item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item) 1728 write_uint64(base_address, item_value_offset, item_offset) 1729 item_type = InternalListFieldTypes.tobj.value 1730 1731 write_uint64(base_address, item_type_offset, item_type) 1732 else: 1733 raise TypeError 1734 1735 def __delitem__(self, key: Union[int, slice], need_to_free_item: bool = True) -> None: 1736 if isinstance(key, int): 1737 if key < 0: 1738 key += len(self) 1739 if key < 0 or key >= len(self): 1740 raise IndexError 1741 1742 if need_to_free_item: 1743 self._free_item(key) 1744 1745 for i in range(key + 1, len(self)): 1746 self._move_item(i, i - 1) 1747 1748 self._list_len -= 1 1749 elif isinstance(key, slice): 1750 if key.step is not None: 1751 raise NotImplementedError 1752 1753 if key.start is None: 1754 start = 0 1755 elif key.start < 0: 1756 start = key.start + len(self) 1757 else: 1758 start = key.start 1759 1760 if key.stop is None: 1761 stop = len(self) 1762 elif key.stop < 0: 1763 stop = key.stop + len(self) 1764 else: 1765 stop = key.stop 1766 1767 if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop: 1768 raise IndexError 1769 1770 if need_to_free_item: 1771 for i in range(start, stop): 1772 self._free_item(i) 1773 1774 del_items_num = stop - start 1775 1776 for i in range(stop, len(self)): 1777 self._move_item(i, i - del_items_num) 1778 1779 self._list_len -= del_items_num 1780 else: 1781 raise TypeError 1782 1783 def append(self, item: Any) -> None: 1784 if self._list_len > self._list_capacity: 1785 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list) 1786 1787 self._list_len += 1 1788 self.__setitem__(self._list_len - 1, item, need_to_free_item=False) 1789 1790 def append_as_offset(self, value_type_and_offset: Tuple[int, Offset]) -> None: 1791 if self._list_len > self._list_capacity: 1792 self._pointer_to_internal_list, result_size = 
realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list) 1793 1794 self._list_len += 1 1795 self._setitem_as_offset(self._list_len - 1, value_type_and_offset, need_to_free_item=False) 1796 1797 def getitem_as_offset(self, key: int) -> Tuple[int, Offset]: 1798 return self._getitem_as_offset(key) 1799 1800 def setitem_as_offset(self, key: int, value_type_and_offset: Tuple[int, Offset], need_to_free_item=True) -> None: 1801 self._setitem_as_offset(key, value_type_and_offset, need_to_free_item) 1802 1803 def extend(self, items: Sequence) -> None: 1804 items_num = len(items) 1805 if (self._list_len + items_num) > self._list_capacity: 1806 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num) 1807 1808 original_list_len = self._list_len 1809 self._list_len += items_num 1810 for i, item in enumerate(items): 1811 self.__setitem__(original_list_len + i, item, need_to_free_item=False) 1812 1813 def extend_with(self, items_num: int, value = None) -> None: 1814 if (self._list_len + items_num) > self._list_capacity: 1815 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num) 1816 1817 original_list_len = self._list_len 1818 self._list_len += items_num 1819 for i in range(items_num): 1820 self.__setitem__(original_list_len + i, value, need_to_free_item=False) 1821 1822 def set_capacity(self, capacity: int) -> int: 1823 if capacity <= self._list_capacity: 1824 return 1825 1826 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, capacity) 1827 return result_size 1828 1829 def insert(self, index: int, item: Any) -> None: 1830 if index < 0: 1831 index += len(self) 1832 if index < 0 or index > len(self): 1833 raise IndexError 1834 1835 if self._list_len > self._list_capacity: 1836 # 
self._shared_memory.print_mem(self._pointer_to_internal_list, 200, 'before realloc. {}') 1837 # self.print_internal_list('before realloc. {}') 1838 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list) 1839 # self._shared_memory.print_mem(self._pointer_to_internal_list, 200, 'after realloc. {}') 1840 # self.print_internal_list('after realloc. {}') 1841 1842 # self.print_internal_list('before inserting {}') 1843 self._list_len += 1 1844 # self.print_internal_list('before inserting but after +1 {}') 1845 for i in range(self._list_len - 1, index, -1): 1846 self._move_item(i - 1, i) 1847 # self._shared_memory.print_mem(self._pointer_to_internal_list, 200, f'after self._move_item({i - 1, i}). {{}}') 1848 # self.print_internal_list(f'after self._move_item({i - 1, i}). {{}}') 1849 1850 self.__setitem__(index, item, need_to_free_item=False) 1851 # self._shared_memory.print_mem(self._pointer_to_internal_list, 200, 'after inserting. {}') 1852 # self.print_internal_list('after inserting. 
{}') 1853 1854 def print_internal_list(self, text: str = None, additional_cells: int = 0): 1855 internal_list = self._shared_memory.read_mem(self._pointer_to_internal_list, bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + self._list_len * bs * len(InternalListFieldOffsets) + additional_cells * bs * len(InternalListFieldOffsets)) 1856 print('--- internal list -------------') 1857 if text: 1858 print(text.format(self._pointer_to_internal_list)) 1859 print('------') 1860 1861 index = 0 1862 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs]) 1863 index += bs 1864 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs]) 1865 index += bs 1866 print('---') 1867 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs]) 1868 index += bs 1869 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs]) 1870 index += bs 1871 print('---') 1872 for i in range(self._list_len): 1873 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs * 2]) 1874 index += bs * 2 1875 1876 if additional_cells: 1877 print('------') 1878 for i in range(additional_cells): 1879 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs]) 1880 index += bs * 2 1881 print('-------------------------------') 1882 print() 1883 1884 def pop(self, index: int = -1) -> Any: 1885 if index < 0: 1886 index += len(self) 1887 if index < 0 or index >= len(self): 1888 raise IndexError 1889 1890 result = self.__getitem__(index) 1891 1892 for i in range(index + 1, len(self)): 1893 self._move_item(i, i - 1) 1894 1895 self._list_len -= 1 1896 return result 1897 1898 def remove(self, obj: Any) -> None: 1899 obj_type = self._determine_obj_type(obj) 1900 obj_offset = self._determine_obj_offset(obj) 1901 found_in_index = None 1902 for i in range(len(self)): 1903 if self._compare_item_to_obj_fast(i, obj, 
obj_type, obj_offset): 1904 found_in_index = i 1905 break 1906 1907 if found_in_index is None: 1908 raise ValueError 1909 else: 1910 self.__delitem__(found_in_index) 1911 1912 def clear(self, need_to_free_item: bool = True) -> None: 1913 if need_to_free_item: 1914 for i in range(len(self)): 1915 self._free_item(i) 1916 1917 self._list_len = 0 1918 1919 def __iter__(self): 1920 return IListIterator(self) 1921 1922 def __reversed__(self): 1923 return IListReversedIterator(self) 1924 1925 def __contains__(self, obj: Any) -> bool: 1926 obj_type = self._determine_obj_type(obj) 1927 obj_offset = self._determine_obj_offset(obj) 1928 found_in_index = None 1929 for i in range(len(self)): 1930 if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset): 1931 found_in_index = i 1932 break 1933 1934 if found_in_index is None: 1935 return False 1936 else: 1937 return True 1938 1939 def index(self, obj: Any, start: int = 0, stop: int = None) -> int: 1940 if stop is None: 1941 stop = len(self) 1942 1943 obj_type = self._determine_obj_type(obj) 1944 obj_offset = self._determine_obj_offset(obj) 1945 found_in_index = None 1946 for i in range(start, stop): 1947 if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset): 1948 found_in_index = i 1949 break 1950 1951 if found_in_index is None: 1952 raise ValueError 1953 else: 1954 return found_in_index 1955 1956 def count(self, obj: Any) -> int: 1957 obj_type = self._determine_obj_type(obj) 1958 obj_offset = self._determine_obj_offset(obj) 1959 result = 0 1960 for i in range(len(self)): 1961 if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset): 1962 result += 1 1963 1964 return result 1965 1966 def reverse(self) -> None: 1967 my_len = len(self) 1968 for i in range(my_len // 2): 1969 self._swap_items(i, my_len - i - 1) 1970 1971 def sort(self, key: Any = None, reverse: bool = False) -> None: 1972 raise NotImplementedError 1973 1974 def copy(self) -> 'IList': 1975 result = IList(self._shared_memory) 1976 
result.extend(self) 1977 return result 1978 1979 def __add__(self, other: Sequence) -> 'IList': 1980 result = IList(self._shared_memory) 1981 result.extend(self) 1982 result.extend(other) 1983 return result 1984 1985 def __iadd__(self, other: Sequence) -> 'IList': 1986 self.extend(other) 1987 return self 1988 1989 def __mul__(self, other: int) -> 'IList': 1990 result = IList(self._shared_memory) 1991 for i in range(other): 1992 result.extend(self) 1993 1994 return result 1995 1996 def __imul__(self, other: int) -> 'IList': 1997 my_copy: IList = self.copy() 1998 for i in range(other): 1999 self.extend(my_copy) 2000 2001 return self 2002 2003 def __rmul__(self, other: int) -> 'IList': 2004 return self.__mul__(other) 2005 2006 def __eq__(self, other: Sequence) -> bool: 2007 if len(self) != len(other): 2008 return False 2009 2010 for i in range(len(self)): 2011 if self[i] != other[i]: 2012 return False 2013 2014 return True 2015 2016 def __ne__(self, other: Sequence) -> bool: 2017 return not self.__eq__(other) 2018 2019 def __lt__(self, other: Sequence) -> bool: 2020 for i in range(len(self)): 2021 if self[i] >= other[i]: 2022 return False 2023 2024 return True 2025 2026 def __le__(self, other: Sequence) -> bool: 2027 for i in range(len(self)): 2028 if self[i] > other[i]: 2029 return False 2030 2031 return True 2032 2033 def __gt__(self, other: Sequence) -> bool: 2034 for i in range(len(self)): 2035 if self[i] <= other[i]: 2036 return False 2037 2038 return True 2039 2040 def __ge__(self, other: Sequence) -> bool: 2041 for i in range(len(self)): 2042 if self[i] < other[i]: 2043 return False 2044 2045 return True 2046 2047 def __repr__(self) -> str: 2048 return f'IList({list(self)})' 2049 2050 def __str__(self) -> str: 2051 return f'IList({list(self)})' 2052 2053 def __hash__(self) -> int: 2054 return hash(tuple(self)) 2055 2056 def __sizeof__(self) -> int: 2057 return bs * len(BaseObjOffsets) + read_uint64(self._base_address, self._offset + bs * 
BaseObjOffsets.obj_size) + bs * len(BaseObjOffsets) + read_uint64(self._base_address, self._pointer_to_internal_list, bs * BaseObjOffsets.obj_size) 2058 2059 def export(self) -> list: 2060 return list(self) 2061 2062 # def __del__(self) -> None: 2063 # self._shared_memory.free(self._pointer_to_internal_list) 2064 # self._shared_memory.free(self._offset) 2065 2066 def _free_mem(self): 2067 if self._offset is not None: 2068 if self._pointer_to_internal_list is not None: 2069 self.clear() 2070 destroy_tinternal_list(self._shared_memory, self._pointer_to_internal_list) 2071 self._pointer_to_internal_list = 0 2072 2073 self._shared_memory.free(self._offset) 2074 self._offset = None
Built-in mutable sequence.
If no argument is given, the constructor creates a new empty list. The argument must be an iterable if specified.
1203 def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: List = None) -> None: 1204 self._shared_memory = shared_memory 1205 self._base_address = shared_memory.base_address 1206 if offset is None: 1207 offset, real_size = shared_memory.malloc(ObjectType.tlist, bs * len(ListOffsets)) 1208 try: 1209 self._offset = offset 1210 self._offset__data = offset + bs * len(BaseObjOffsets) 1211 self._offset__pointer_to_internal_list = self._offset__data + bs * ListOffsets.internal_list_offset 1212 1213 if obj is None: 1214 obj = list() 1215 1216 data_len = len(obj) 1217 internal_list_offset, data_tuple_real_size = malloc_tinternal_list(shared_memory, data_len) 1218 self._pointer_to_internal_list = internal_list_offset 1219 for i, item in enumerate(obj): 1220 # print(self.get_children_offsets()) 1221 # # print(self.raw_to_list(slice(0, None))) 1222 # print(self.raw_to_bytes(200)) 1223 self._write_item(i, item) 1224 # print(self.get_children_offsets()) 1225 # # print(self.raw_to_list(slice(0, None))) 1226 # print(self.raw_to_bytes(200)) 1227 1228 # print(self.get_children_offsets()) 1229 # # print(self.raw_to_list(slice(0, None))) 1230 # print(self.raw_to_bytes(200)) 1231 # print('=======================') 1232 except: 1233 self._free_mem() 1234 raise 1235 else: 1236 self._offset = offset 1237 self._offset__data = offset + bs * len(BaseObjOffsets) 1238 self._offset__pointer_to_internal_list = self._offset__data + bs * ListOffsets.internal_list_offset
1240 def raw_to_list(self, key) -> List[bytes]: 1241 if isinstance(key, int): 1242 if key < 0: 1243 key += len(self) 1244 if key < 0 or key >= len(self): 1245 raise IndexError 1246 1247 item_offset = self._read_item_offset_or_data(key) 1248 return [uint64_to_bytes(item_offset)] 1249 elif isinstance(key, slice): 1250 if key.step is not None: 1251 raise NotImplementedError 1252 1253 if key.start is None: 1254 start = 0 1255 elif key.start < 0: 1256 start = key.start + len(self) 1257 else: 1258 start = key.start 1259 1260 if key.stop is None: 1261 stop = len(self) 1262 elif key.stop < 0: 1263 stop = key.stop + len(self) 1264 else: 1265 stop = key.stop 1266 1267 if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop: 1268 raise IndexError 1269 1270 result_list = list() 1271 for i in range(start, stop): 1272 item_offset = self._read_item_offset_or_data(i) 1273 result_list.append(uint64_to_bytes(item_offset)) 1274 1275 return result_list
1502 def move_item_to_list(self, src_key: int, other: 'IList', dst_key: int) -> None: 1503 other._write_item_type(dst_key, self._read_item_type(src_key)) 1504 self._write_item_type(src_key, InternalListFieldTypes.tnone.value) 1505 other._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key))
1783 def append(self, item: Any) -> None: 1784 if self._list_len > self._list_capacity: 1785 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list) 1786 1787 self._list_len += 1 1788 self.__setitem__(self._list_len - 1, item, need_to_free_item=False)
Append object to the end of the list.
1790 def append_as_offset(self, value_type_and_offset: Tuple[int, Offset]) -> None: 1791 if self._list_len > self._list_capacity: 1792 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list) 1793 1794 self._list_len += 1 1795 self._setitem_as_offset(self._list_len - 1, value_type_and_offset, need_to_free_item=False)
1803 def extend(self, items: Sequence) -> None: 1804 items_num = len(items) 1805 if (self._list_len + items_num) > self._list_capacity: 1806 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num) 1807 1808 original_list_len = self._list_len 1809 self._list_len += items_num 1810 for i, item in enumerate(items): 1811 self.__setitem__(original_list_len + i, item, need_to_free_item=False)
Extend list by appending elements from the iterable.
1813 def extend_with(self, items_num: int, value = None) -> None: 1814 if (self._list_len + items_num) > self._list_capacity: 1815 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num) 1816 1817 original_list_len = self._list_len 1818 self._list_len += items_num 1819 for i in range(items_num): 1820 self.__setitem__(original_list_len + i, value, need_to_free_item=False)
1829 def insert(self, index: int, item: Any) -> None: 1830 if index < 0: 1831 index += len(self) 1832 if index < 0 or index > len(self): 1833 raise IndexError 1834 1835 if self._list_len > self._list_capacity: 1836 # self._shared_memory.print_mem(self._pointer_to_internal_list, 200, 'before realloc. {}') 1837 # self.print_internal_list('before realloc. {}') 1838 self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list) 1839 # self._shared_memory.print_mem(self._pointer_to_internal_list, 200, 'after realloc. {}') 1840 # self.print_internal_list('after realloc. {}') 1841 1842 # self.print_internal_list('before inserting {}') 1843 self._list_len += 1 1844 # self.print_internal_list('before inserting but after +1 {}') 1845 for i in range(self._list_len - 1, index, -1): 1846 self._move_item(i - 1, i) 1847 # self._shared_memory.print_mem(self._pointer_to_internal_list, 200, f'after self._move_item({i - 1, i}). {{}}') 1848 # self.print_internal_list(f'after self._move_item({i - 1, i}). {{}}') 1849 1850 self.__setitem__(index, item, need_to_free_item=False) 1851 # self._shared_memory.print_mem(self._pointer_to_internal_list, 200, 'after inserting. {}') 1852 # self.print_internal_list('after inserting. {}')
Insert object before index.
1854 def print_internal_list(self, text: str = None, additional_cells: int = 0): 1855 internal_list = self._shared_memory.read_mem(self._pointer_to_internal_list, bs * len(BaseObjOffsets) + bs * len(InternalListOffsets) + self._list_len * bs * len(InternalListFieldOffsets) + additional_cells * bs * len(InternalListFieldOffsets)) 1856 print('--- internal list -------------') 1857 if text: 1858 print(text.format(self._pointer_to_internal_list)) 1859 print('------') 1860 1861 index = 0 1862 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs]) 1863 index += bs 1864 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs]) 1865 index += bs 1866 print('---') 1867 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs]) 1868 index += bs 1869 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs]) 1870 index += bs 1871 print('---') 1872 for i in range(self._list_len): 1873 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs * 2]) 1874 index += bs * 2 1875 1876 if additional_cells: 1877 print('------') 1878 for i in range(additional_cells): 1879 print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + bs]) 1880 index += bs * 2 1881 print('-------------------------------') 1882 print()
1884 def pop(self, index: int = -1) -> Any: 1885 if index < 0: 1886 index += len(self) 1887 if index < 0 or index >= len(self): 1888 raise IndexError 1889 1890 result = self.__getitem__(index) 1891 1892 for i in range(index + 1, len(self)): 1893 self._move_item(i, i - 1) 1894 1895 self._list_len -= 1 1896 return result
Remove and return item at index (default last).
Raises IndexError if list is empty or index is out of range.
1898 def remove(self, obj: Any) -> None: 1899 obj_type = self._determine_obj_type(obj) 1900 obj_offset = self._determine_obj_offset(obj) 1901 found_in_index = None 1902 for i in range(len(self)): 1903 if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset): 1904 found_in_index = i 1905 break 1906 1907 if found_in_index is None: 1908 raise ValueError 1909 else: 1910 self.__delitem__(found_in_index)
Remove first occurrence of value.
Raises ValueError if the value is not present.
1912 def clear(self, need_to_free_item: bool = True) -> None: 1913 if need_to_free_item: 1914 for i in range(len(self)): 1915 self._free_item(i) 1916 1917 self._list_len = 0
Remove all items from list.
1939 def index(self, obj: Any, start: int = 0, stop: int = None) -> int: 1940 if stop is None: 1941 stop = len(self) 1942 1943 obj_type = self._determine_obj_type(obj) 1944 obj_offset = self._determine_obj_offset(obj) 1945 found_in_index = None 1946 for i in range(start, stop): 1947 if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset): 1948 found_in_index = i 1949 break 1950 1951 if found_in_index is None: 1952 raise ValueError 1953 else: 1954 return found_in_index
Return first index of value.
Raises ValueError if the value is not present.
1956 def count(self, obj: Any) -> int: 1957 obj_type = self._determine_obj_type(obj) 1958 obj_offset = self._determine_obj_offset(obj) 1959 result = 0 1960 for i in range(len(self)): 1961 if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset): 1962 result += 1 1963 1964 return result
Return number of occurrences of value.
1966 def reverse(self) -> None: 1967 my_len = len(self) 1968 for i in range(my_len // 2): 1969 self._swap_items(i, my_len - i - 1)
Reverse IN PLACE.
Sort the list in ascending order and return None.
The sort is in-place (i.e. the list itself is modified) and stable (i.e. the order of two equal elements is maintained).
If a key function is given, apply it once to each list item and sort them, ascending or descending, according to their function values.
The reverse flag can be set to sort in descending order.
class IListIterator:
    """Forward iterator over an ``IList``.

    Only relies on ``len()`` and integer indexing of the wrapped object.
    """

    def __init__(self, ilist: 'IList') -> None:
        self._ilist = ilist
        self._index = 0

    def __iter__(self):
        return self

    def __next__(self):
        if self._index >= len(self._ilist):
            raise StopIteration

        item = self._ilist[self._index]
        self._index += 1
        return item
class IListReversedIterator:
    """Backward iterator over an ``IList``.

    Only relies on ``len()`` and integer indexing of the wrapped object.
    """

    def __init__(self, ilist: 'IList') -> None:
        self._ilist = ilist
        self._index = len(ilist) - 1

    def __iter__(self):
        return self

    def __next__(self):
        if self._index < 0:
            raise StopIteration

        item = self._ilist[self._index]
        self._index -= 1
        return item
class TList:
    """Codec binding Python ``list`` objects to shared-memory ``IList`` storage."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: list) -> Tuple[list, Offset, Size]:
        """Materialize ``obj`` in shared memory; return (mapped obj, offset, size)."""
        mapped = IList(shared_memory, obj=obj)
        return mapped, mapped._offset, mapped._obj_size

    # Fixed return annotation: the original declared ``-> None`` but the
    # method returns the attached ``IList``.
    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> 'IList':
        """Attach to an existing ``tlist`` object at ``offset``.

        Raises WrongObjectTypeError when ``offset`` does not hold a ``tlist``.
        """
        if ObjectType.tlist != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        return IList(shared_memory, offset)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        """Free the ``tlist`` object at ``offset`` together with all of its items."""
        if ObjectType.tlist != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        obj: IList = IList(shared_memory, offset)
        obj._free_mem()
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class TTuple:
    """Codec storing Python tuples as a size header plus per-item object offsets."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: tuple) -> Tuple[tuple, Offset, Size]:
        """Write ``obj`` into shared memory; return (obj, offset, real_size).

        On failure, every item already mapped is destroyed and the tuple's
        own allocation is freed before the exception is re-raised.
        """
        offset, real_size = shared_memory.malloc(ObjectType.ttuple, bs * len(TupleOffsets) + len(obj) * bs * len(TupleFieldOffsets))
        created_items_offsets: List[Offset] = list()
        try:
            # NOTE: removed leftover debug hook that set
            # ``shared_memory.offset_to_be_monitored`` whenever the literal
            # ``(1, [2, 3])`` was mapped.
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TupleOffsets.size, len(obj))
            for i, item in enumerate(obj):
                item_mapped_obj, item_offset, item_size = shared_memory.put_obj(item)
                created_items_offsets.append(item_offset)
                write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * len(TupleOffsets) + i * bs * len(TupleFieldOffsets), item_offset)
        except BaseException:
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)

            raise

        return obj, offset, real_size

    # Fixed return annotation: the original declared ``-> None`` but the
    # method returns the reconstructed tuple.
    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> tuple:
        """Rebuild and return the tuple stored at ``offset``.

        Raises WrongObjectTypeError when ``offset`` does not hold a ``ttuple``.
        """
        if ObjectType.ttuple != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        result_list = list()
        size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TupleOffsets.size)
        for i in range(size):
            item_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * len(TupleOffsets) + i * bs * len(TupleFieldOffsets))
            result_list.append(shared_memory.get_obj(item_offset))

        return tuple(result_list)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        """Destroy each stored item, then free the tuple object itself."""
        if ObjectType.ttuple != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TupleOffsets.size)
        for i in range(size):
            item_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * len(TupleOffsets) + i * bs * len(TupleFieldOffsets))
            shared_memory.destroy_obj(item_offset)

        shared_memory.free(offset)
2180 def destroy(self, shared_memory: 'SharedMemory', offset: Offset): 2181 if ObjectType.ttuple != read_uint64(shared_memory.base_address, offset): 2182 raise WrongObjectTypeError 2183 2184 size = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TupleOffsets.size) 2185 for i in range(size): 2186 item_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * len(TupleOffsets) + i * bs * len(TupleFieldOffsets)) 2187 shared_memory.destroy_obj(item_offset) 2188 2189 shared_memory.free(offset)
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
2203class TDatetime: 2204 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: DatetimeTypes) -> Tuple[DatetimeTypes, Offset, Size]: 2205 offset, real_size = shared_memory.malloc(ObjectType.tdatetime, bs * len(DatetimeOffsets)) 2206 created_items_offsets: List[Offset] = list() 2207 try: 2208 data_tuple_mapped_obj, data_bytes_offset, data_tuple_size = shared_memory.put_obj(pickle_dumps(obj)) 2209 created_items_offsets.append(data_bytes_offset) 2210 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * DatetimeOffsets.data_bytes_offset, data_bytes_offset) 2211 except: 2212 shared_memory.free(offset) 2213 for item_offset in created_items_offsets: 2214 shared_memory.destroy_obj(item_offset) 2215 2216 raise 2217 2218 return pickle_loads(data_tuple_mapped_obj), offset, real_size 2219 2220 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> DatetimeTypes: 2221 if ObjectType.tdatetime != read_uint64(shared_memory.base_address, offset): 2222 raise WrongObjectTypeError 2223 2224 data_bytes_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * DatetimeOffsets.data_bytes_offset) 2225 result_tuple = shared_memory.get_obj(data_bytes_offset) 2226 return pickle_loads(result_tuple) 2227 2228 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2229 if ObjectType.tdatetime != read_uint64(shared_memory.base_address, offset): 2230 raise WrongObjectTypeError 2231 2232 data_bytes_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * DatetimeOffsets.data_bytes_offset) 2233 shared_memory.destroy_obj(data_bytes_offset) 2234 shared_memory.free(offset)
2228 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2229 if ObjectType.tdatetime != read_uint64(shared_memory.base_address, offset): 2230 raise WrongObjectTypeError 2231 2232 data_bytes_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * DatetimeOffsets.data_bytes_offset) 2233 shared_memory.destroy_obj(data_bytes_offset) 2234 shared_memory.free(offset)
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
2245class TDecimal: 2246 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Decimal) -> Tuple[Decimal, Offset, Size]: 2247 offset, real_size = shared_memory.malloc(ObjectType.tdecimal, bs * len(DecimalOffsets)) 2248 created_items_offsets: List[Offset] = list() 2249 try: 2250 data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj(tuple(obj.as_tuple())) 2251 created_items_offsets.append(data_tuple_offset) 2252 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * DecimalOffsets.data_tuple_offset, data_tuple_offset) 2253 except: 2254 shared_memory.free(offset) 2255 for item_offset in created_items_offsets: 2256 shared_memory.destroy_obj(item_offset) 2257 2258 raise 2259 2260 return Decimal(data_tuple_mapped_obj), offset, real_size 2261 2262 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Decimal: 2263 if ObjectType.tdecimal != read_uint64(shared_memory.base_address, offset): 2264 raise WrongObjectTypeError 2265 2266 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * DecimalOffsets.data_tuple_offset) 2267 result_tuple = shared_memory.get_obj(data_tuple_offset) 2268 return Decimal(result_tuple) 2269 2270 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2271 if ObjectType.tdecimal != read_uint64(shared_memory.base_address, offset): 2272 raise WrongObjectTypeError 2273 2274 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * DecimalOffsets.data_tuple_offset) 2275 shared_memory.destroy_obj(data_tuple_offset) 2276 shared_memory.free(offset)
2270 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2271 if ObjectType.tdecimal != read_uint64(shared_memory.base_address, offset): 2272 raise WrongObjectTypeError 2273 2274 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * DecimalOffsets.data_tuple_offset) 2275 shared_memory.destroy_obj(data_tuple_offset) 2276 shared_memory.free(offset)
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
2287class TSlice: 2288 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: slice) -> Tuple[slice, Offset, Size]: 2289 offset, real_size = shared_memory.malloc(ObjectType.tslice, bs * len(SliceOffsets)) 2290 created_items_offsets: List[Offset] = list() 2291 try: 2292 data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj(tuple(obj.start, obj.stop, obj.step)) 2293 created_items_offsets.append(data_tuple_offset) 2294 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * SliceOffsets.data_tuple_offset, data_tuple_offset) 2295 except: 2296 shared_memory.free(offset) 2297 for item_offset in created_items_offsets: 2298 shared_memory.destroy_obj(item_offset) 2299 2300 raise 2301 2302 return slice(*data_tuple_mapped_obj), offset, real_size 2303 2304 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> slice: 2305 if ObjectType.tslice != read_uint64(shared_memory.base_address, offset): 2306 raise WrongObjectTypeError 2307 2308 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * SliceOffsets.data_tuple_offset) 2309 result_tuple = shared_memory.get_obj(data_tuple_offset) 2310 return slice(*result_tuple) 2311 2312 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2313 if ObjectType.tslice != read_uint64(shared_memory.base_address, offset): 2314 raise WrongObjectTypeError 2315 2316 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * SliceOffsets.data_tuple_offset) 2317 shared_memory.destroy_obj(data_tuple_offset) 2318 shared_memory.free(offset)
2312 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2313 if ObjectType.tslice != read_uint64(shared_memory.base_address, offset): 2314 raise WrongObjectTypeError 2315 2316 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * SliceOffsets.data_tuple_offset) 2317 shared_memory.destroy_obj(data_tuple_offset) 2318 shared_memory.free(offset)
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
2329class TComplex: 2330 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: complex) -> Tuple[complex, Offset, Size]: 2331 offset, real_size = shared_memory.malloc(ObjectType.tfastset, bs * len(ComplexOffsets)) 2332 created_items_offsets: List[Offset] = list() 2333 try: 2334 data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj(tuple(obj.real, obj.imag)) 2335 created_items_offsets.append(data_tuple_offset) 2336 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * ComplexOffsets.data_tuple_offset, data_tuple_offset) 2337 except: 2338 shared_memory.free(offset) 2339 for item_offset in created_items_offsets: 2340 shared_memory.destroy_obj(item_offset) 2341 2342 raise 2343 2344 return complex(real=data_tuple_mapped_obj[0], imag=data_tuple_mapped_obj[1]), offset, real_size 2345 2346 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> complex: 2347 if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset): 2348 raise WrongObjectTypeError 2349 2350 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * ComplexOffsets.data_tuple_offset) 2351 result_tuple = shared_memory.get_obj(data_tuple_offset) 2352 return complex(real=result_tuple[0], imag=result_tuple[1]) 2353 2354 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2355 if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset): 2356 raise WrongObjectTypeError 2357 2358 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * ComplexOffsets.data_tuple_offset) 2359 shared_memory.destroy_obj(data_tuple_offset) 2360 shared_memory.free(offset)
2354 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2355 if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset): 2356 raise WrongObjectTypeError 2357 2358 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * ComplexOffsets.data_tuple_offset) 2359 shared_memory.destroy_obj(data_tuple_offset) 2360 shared_memory.free(offset)
set() -> new empty set object set(iterable) -> new set object
Build an unordered collection of unique elements.
Inherited Members
- builtins.set
- set
- add
- clear
- copy
- discard
- difference
- difference_update
- intersection
- intersection_update
- isdisjoint
- issubset
- issuperset
- pop
- remove
- symmetric_difference
- symmetric_difference_update
- union
- update
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
2375class TFastSet: 2376 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: set) -> Tuple[set, Offset, Size]: 2377 offset, real_size = shared_memory.malloc(ObjectType.tfastset, bs * len(FastSetOffsets)) 2378 created_items_offsets: List[Offset] = list() 2379 try: 2380 data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj(tuple(obj)) 2381 created_items_offsets.append(data_tuple_offset) 2382 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * FastSetOffsets.data_tuple_offset, data_tuple_offset) 2383 except: 2384 shared_memory.free(offset) 2385 for item_offset in created_items_offsets: 2386 shared_memory.destroy_obj(item_offset) 2387 2388 raise 2389 2390 return set(data_tuple_mapped_obj), offset, real_size 2391 2392 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> set: 2393 if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset): 2394 raise WrongObjectTypeError 2395 2396 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * FastSetOffsets.data_tuple_offset) 2397 result_tuple = shared_memory.get_obj(data_tuple_offset) 2398 return set(result_tuple) 2399 2400 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2401 if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset): 2402 raise WrongObjectTypeError 2403 2404 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * FastSetOffsets.data_tuple_offset) 2405 shared_memory.destroy_obj(data_tuple_offset) 2406 shared_memory.free(offset)
2400 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2401 if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset): 2402 raise WrongObjectTypeError 2403 2404 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * FastSetOffsets.data_tuple_offset) 2405 shared_memory.destroy_obj(data_tuple_offset) 2406 shared_memory.free(offset)
Inherited Members
- builtins.dict
- get
- setdefault
- pop
- popitem
- keys
- items
- values
- update
- fromkeys
- clear
- copy
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
2421class TFastDict: 2422 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: dict) -> Tuple[dict, Offset, Size]: 2423 offset, real_size = shared_memory.malloc(ObjectType.tfastdict, bs * len(FastDictOffsets)) 2424 created_items_offsets: List[Offset] = list() 2425 try: 2426 data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj(tuple(obj.items())) 2427 created_items_offsets.append(data_tuple_offset) 2428 write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * FastDictOffsets.data_tuple_offset, data_tuple_offset) 2429 except: 2430 shared_memory.free(offset) 2431 for item_offset in created_items_offsets: 2432 shared_memory.destroy_obj(item_offset) 2433 2434 raise 2435 2436 return dict(data_tuple_mapped_obj), offset, real_size 2437 2438 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> dict: 2439 if ObjectType.tfastdict != read_uint64(shared_memory.base_address, offset): 2440 raise WrongObjectTypeError 2441 2442 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * FastDictOffsets.data_tuple_offset) 2443 result_tuple = shared_memory.get_obj(data_tuple_offset) 2444 return dict(result_tuple) 2445 2446 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2447 if ObjectType.tfastdict != read_uint64(shared_memory.base_address, offset): 2448 raise WrongObjectTypeError 2449 2450 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * FastDictOffsets.data_tuple_offset) 2451 shared_memory.destroy_obj(data_tuple_offset) 2452 shared_memory.free(offset)
2446 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 2447 if ObjectType.tfastdict != read_uint64(shared_memory.base_address, offset): 2448 raise WrongObjectTypeError 2449 2450 data_tuple_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * FastDictOffsets.data_tuple_offset) 2451 shared_memory.destroy_obj(data_tuple_offset) 2452 shared_memory.free(offset)
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
2471class SetHashmapItemOffsets(IntEnum): 2472 field_type = 0 2473 field_hash = 1 2474 obj_or_bucket = 2
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class ISet(BaseIObject, AbsSet):
    """Immutable set stored in shared memory.

    Layout: a small header (size, capacity, hashmap offset) followed by a flat
    IList "hashmap" holding ``capacity * len(SetHashmapItemOffsets)`` slots.
    Each logical slot is (field_type, hash, obj-or-bucket); hash collisions
    spill into a per-slot IList bucket of interleaved (hash, obj) pairs.
    Constructed either from an existing shared-memory *offset* or from a
    regular set *obj* to be stored.
    """

    __slots__ = ('_shared_memory', '_base_address', '_obj_size', '_offset', '_offset__data', '_offset__size_offset', '_offset__capacity_offset', '_offset__hashmap_offset', '_load_factor', '_hash_bits', '_capacity', '_size', 'hashmap', 'hashmap_offset', 'buckets')

    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsSet = None) -> None:
        """Create a new shared-memory set (offset is None) or attach to an
        existing one at *offset*.

        :param shared_memory: owning SharedMemory arena.
        :param offset: offset of an already-stored set to attach to, or None.
        :param obj: initial contents when creating a new set; may be None.
        """
        self._shared_memory = shared_memory
        self._base_address = shared_memory.base_address
        self._obj_size = None
        self._offset: Offset = None
        self._offset__data: Offset = None
        self._offset__size_offset: Offset = None
        self._offset__capacity_offset: Offset = None
        self._offset__hashmap_offset: Offset = None
        self._load_factor = 0.75
        self._hash_bits: int = None
        self._capacity: int = None
        self._size: int = None
        self.hashmap: IList = None
        self.hashmap_offset: Offset = None
        self.buckets: Dict[int, IList] = dict()

        if offset is None:
            # Creating a fresh set in shared memory.
            if obj is None:
                # No contents given: reserve room for a default of 16 entries.
                data_len = 16
            else:
                data_len = len(obj)

            self._size: int = data_len
            self.hash_bits = 1
            # Size the table so the load factor stays at or below _load_factor.
            self.capacity = int(ceil(data_len / self._load_factor))

            offset, self._obj_size = shared_memory.malloc(ObjectType.tset, bs * len(SetOffsets))
            try:
                self._offset = offset
                offset__data = offset + bs * len(BaseObjOffsets)
                self._offset__data = offset__data
                self._offset__size_offset: Offset = offset__data + bs * SetOffsets.size.value
                self._offset__capacity_offset: Offset = offset__data + bs * SetOffsets.capacity.value
                self._offset__hashmap_offset = offset__data + bs * SetOffsets.hashmap_offset.value

                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)

                # Backing store: one flat IList, len(SetHashmapItemOffsets)
                # slots per logical hashmap entry, zero-filled.
                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
                self.hashmap = cast(IList, self.hashmap)
                self.hashmap_offset = hashmap_offset
                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
                hashmap_capacity = self.capacity * len(SetHashmapItemOffsets)
                self.hashmap.set_capacity(hashmap_capacity)
                self.hashmap.extend_with(hashmap_capacity, 0)
                hash_bits: int = self.hash_bits
                if obj is not None:
                    for item in obj:
                        item_hash = hash(item)
                        # Slot index = low hash_bits of the hash, scaled to the
                        # flat slot width.
                        item_info_index: int = mask_least_significant_bits(item_hash, hash_bits) * len(SetHashmapItemOffsets)
                        field_type_index = item_info_index + SetHashmapItemOffsets.field_type.value
                        item_hash_index = item_info_index + SetHashmapItemOffsets.field_hash.value
                        item_bucket_index = item_info_index + SetHashmapItemOffsets.obj_or_bucket.value
                        field_type = self.hashmap[field_type_index]
                        if SetHashmapFieldTypes.tnone.value == field_type:
                            # Empty slot: store the object inline.
                            self.hashmap[field_type_index] = SetHashmapFieldTypes.tobj.value
                            self.hashmap[item_hash_index] = item_hash
                            self.hashmap[item_bucket_index] = item
                        elif SetHashmapFieldTypes.tobj.value == field_type:
                            # First collision: demote the inline entry into a
                            # new bucket list, then append the new item.
                            bucket, bucket_offset, _ = shared_memory.put_obj(list())
                            bucket = cast(IList, bucket)
                            bucket.set_capacity(len(SetBucketOffsets))
                            bucket.extend_with(len(SetBucketOffsets), 0)
                            self.buckets[item_info_index] = bucket
                            self.hashmap.move_item_to_list(item_hash_index, bucket, SetBucketOffsets.field_hash.value)
                            self.hashmap.move_item_to_list(item_bucket_index, bucket, SetBucketOffsets.obj.value)
                            self.hashmap[field_type_index] = SetHashmapFieldTypes.tbucket.value
                            self.hashmap[item_bucket_index] = bucket_offset
                            bucket.append(item_hash)
                            bucket.append(item)
                        elif SetHashmapFieldTypes.tbucket.value == field_type:
                            # Further collisions: append the (hash, obj) pair.
                            bucket = self.buckets[item_info_index]
                            bucket.append(item_hash)
                            bucket.append(item)
                        else:
                            raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
            except:
                # Construction failed part-way: release everything allocated.
                self._free_mem()
                raise
        else:
            # Attaching to an already-stored set at *offset*.
            self._offset = offset
            offset__data = offset + bs * len(BaseObjOffsets)
            self._offset__data = offset__data
            self._offset__size_offset: Offset = offset__data + bs * SetOffsets.size
            self._offset__capacity_offset: Offset = offset__data + bs * SetOffsets.capacity
            self._offset__hashmap_offset = offset__data + bs * SetOffsets.hashmap_offset

            self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
            self.hash_bits = 1
            self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
            hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)

            self.hashmap_offset = hashmap_offset
            self.hashmap = IList(shared_memory, hashmap_offset)
            item_info_index: int = 0
            # Scan every slot once to materialize IList wrappers for the
            # collision buckets; inline and empty slots need no local state.
            for item_info_index in range(0, self.capacity * len(SetHashmapItemOffsets), len(SetHashmapItemOffsets)):
                field_type_index = item_info_index + SetHashmapItemOffsets.field_type.value
                item_hash_index = item_info_index + SetHashmapItemOffsets.field_hash.value
                item_bucket_index = item_info_index + SetHashmapItemOffsets.obj_or_bucket.value
                field_type = self.hashmap[field_type_index]
                if SetHashmapFieldTypes.tnone.value == field_type:
                    continue
                elif SetHashmapFieldTypes.tobj.value == field_type:
                    continue
                elif SetHashmapFieldTypes.tbucket.value == field_type:
                    bucket_offset = self.hashmap[item_bucket_index]
                    self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
                else:
                    raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def __len__(self):
        """Number of items, as recorded in the shared-memory header."""
        return self._size

    def __iter__(self):
        return ISetIterator(self)

    def __contains__(self, obj: Any) -> bool:
        """Hash-based membership test: check the inline slot or scan the
        slot's collision bucket."""
        item_hash = hash(obj)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(SetHashmapItemOffsets)
        field_type_index = item_info_index + SetHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + SetHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + SetHashmapItemOffsets.obj_or_bucket.value
        field_type = self.hashmap[field_type_index]
        if SetHashmapFieldTypes.tnone.value == field_type:
            return False
        elif SetHashmapFieldTypes.tobj.value == field_type:
            # Compare hash first (cheap), then fall back to equality.
            return (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index])
        elif SetHashmapFieldTypes.tbucket.value == field_type:
            bucket = self.buckets[item_info_index]
            # Bucket holds interleaved (hash, obj) pairs; step by pair width.
            for sub_item_info_index in range(0, len(bucket) * len(SetBucketOffsets), len(SetBucketOffsets)):
                sub_item_hash_index = sub_item_info_index + SetBucketOffsets.field_hash.value
                sub_item_obj_index = sub_item_info_index + SetBucketOffsets.obj.value
                if (item_hash == bucket[sub_item_hash_index]) and (obj == bucket[sub_item_obj_index]):
                    return True

            return False
        else:
            raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def __hash__(self):
        # Delegates to _hash() — presumably provided by BaseIObject; confirm.
        return self._hash()

    @property
    def hash_bits(self) -> int:
        """Number of low-order hash bits used to pick a hashmap slot."""
        return self._hash_bits

    @hash_bits.setter
    def hash_bits(self, value: int) -> None:
        # Capacity is always a power of two of the bit width.
        self._hash_bits = value
        self._capacity = 2 ** value

    @property
    def capacity(self) -> int:
        """Current hashmap slot count (always a power of two)."""
        return self._capacity

    @capacity.setter
    def capacity(self, value: int) -> None:
        # Capacity can only grow; it is rounded up to the next power of two
        # via hash_bits.
        if value <= self._capacity:
            return

        if value <= 2:
            self.hash_bits = 1
        else:
            self.hash_bits = int(ceil(log2(value)))

    def __str__(self) -> str:
        return set(self).__str__()

    def __repr__(self) -> str:
        return set(self).__repr__()

    def _free_mem(self):
        """Release all shared-memory allocations owned by this set: collision
        buckets, the hashmap list, and the header itself. Idempotent once
        _offset has been cleared."""
        if self._offset is not None:
            for _, bucket in self.buckets.items():
                self._shared_memory.destroy_obj(bucket._offset)

            self.buckets.clear()
            if self.hashmap_offset is not None:
                self._shared_memory.destroy_obj(self.hashmap_offset)
                self.hashmap_offset = None

            self._shared_memory.free(self._offset)
            self._offset = None
A set is a finite, iterable container.
This class provides concrete generic implementations of all methods except for __contains__, __iter__ and __len__.
To override the comparisons (presumably for speed, as the semantics are fixed), redefine __le__ and __ge__, then the other operations will automatically follow suit.
2485 def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsSet = None) -> None: 2486 self._shared_memory = shared_memory 2487 self._base_address = shared_memory.base_address 2488 self._obj_size = None 2489 self._offset: Offset = None 2490 self._offset__data: Offset = None 2491 self._offset__size_offset: Offset = None 2492 self._offset__capacity_offset: Offset = None 2493 self._offset__hashmap_offset: Offset = None 2494 self._load_factor = 0.75 2495 self._hash_bits: int = None 2496 self._capacity: int = None 2497 self._size: int = None 2498 self.hashmap: IList = None 2499 self.hashmap_offset: Offset = None 2500 self.buckets: Dict[int, IList] = dict() 2501 2502 if offset is None: 2503 if obj is None: 2504 # obj = frozenset(set()) 2505 data_len = 16 2506 else: 2507 data_len = len(obj) 2508 2509 self._size: int = data_len 2510 self.hash_bits = 1 2511 self.capacity = int(ceil(data_len / self._load_factor)) 2512 2513 offset, self._obj_size = shared_memory.malloc(ObjectType.tset, bs * len(SetOffsets)) 2514 try: 2515 self._offset = offset 2516 offset__data = offset + bs * len(BaseObjOffsets) 2517 self._offset__data = offset__data 2518 self._offset__size_offset: Offset = offset__data + bs * SetOffsets.size.value 2519 self._offset__capacity_offset: Offset = offset__data + bs * SetOffsets.capacity.value 2520 self._offset__hashmap_offset = offset__data + bs * SetOffsets.hashmap_offset.value 2521 2522 write_uint64(shared_memory.base_address, self._offset__size_offset, self._size) 2523 write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity) 2524 2525 self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list()) 2526 self.hashmap = cast(IList, self.hashmap) 2527 self.hashmap_offset = hashmap_offset 2528 write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset) 2529 hashmap_capacity = self.capacity * len(SetHashmapItemOffsets) 2530 self.hashmap.set_capacity(hashmap_capacity) 2531 
self.hashmap.extend_with(hashmap_capacity, 0) 2532 hash_bits: int = self.hash_bits 2533 if obj is not None: 2534 for item in obj: 2535 item_hash = hash(item) 2536 item_info_index: int = mask_least_significant_bits(item_hash, hash_bits) * len(SetHashmapItemOffsets) 2537 field_type_index = item_info_index + SetHashmapItemOffsets.field_type.value 2538 item_hash_index = item_info_index + SetHashmapItemOffsets.field_hash.value 2539 item_bucket_index = item_info_index + SetHashmapItemOffsets.obj_or_bucket.value 2540 field_type = self.hashmap[field_type_index] 2541 if SetHashmapFieldTypes.tnone.value == field_type: 2542 self.hashmap[field_type_index] = SetHashmapFieldTypes.tobj.value 2543 self.hashmap[item_hash_index] = item_hash 2544 self.hashmap[item_bucket_index] = item 2545 elif SetHashmapFieldTypes.tobj.value == field_type: 2546 bucket, bucket_offset, _ = shared_memory.put_obj(list()) 2547 bucket = cast(IList, bucket) 2548 bucket.set_capacity(len(SetBucketOffsets)) 2549 bucket.extend_with(len(SetBucketOffsets), 0) 2550 self.buckets[item_info_index] = bucket 2551 self.hashmap.move_item_to_list(item_hash_index, bucket, SetBucketOffsets.field_hash.value) 2552 self.hashmap.move_item_to_list(item_bucket_index, bucket, SetBucketOffsets.obj.value) 2553 self.hashmap[field_type_index] = SetHashmapFieldTypes.tbucket.value 2554 self.hashmap[item_bucket_index] = bucket_offset 2555 bucket.append(item_hash) 2556 bucket.append(item) 2557 elif SetHashmapFieldTypes.tbucket.value == field_type: 2558 bucket = self.buckets[item_info_index] 2559 bucket.append(item_hash) 2560 bucket.append(item) 2561 else: 2562 raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}') 2563 except: 2564 self._free_mem() 2565 raise 2566 else: 2567 self._offset = offset 2568 offset__data = offset + bs * len(BaseObjOffsets) 2569 self._offset__data = offset__data 2570 self._offset__size_offset: Offset = offset__data + bs * SetOffsets.size 2571 
self._offset__capacity_offset: Offset = offset__data + bs * SetOffsets.capacity 2572 self._offset__hashmap_offset = offset__data + bs * SetOffsets.hashmap_offset 2573 2574 self._size = read_uint64(shared_memory.base_address, self._offset__size_offset) 2575 self.hash_bits = 1 2576 self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset) 2577 hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset) 2578 2579 self.hashmap_offset = hashmap_offset 2580 self.hashmap = IList(shared_memory, hashmap_offset) 2581 item_info_index: int = 0 2582 # for item_info_index in range(self.capacity): 2583 # field_type_index = item_info_index * len(SetHashmapItemOffsets) + SetHashmapItemOffsets.field_type.value 2584 # item_hash_index = item_info_index * len(SetHashmapItemOffsets) + SetHashmapItemOffsets.field_hash.value 2585 # item_bucket_index = item_info_index * len(SetHashmapItemOffsets) + SetHashmapItemOffsets.obj_or_bucket.value 2586 # field_type = self.hashmap[field_type_index] 2587 # if SetHashmapFieldTypes.tnone.value == field_type: 2588 # continue 2589 # elif SetHashmapFieldTypes.tobj.value == field_type: 2590 # continue 2591 # elif SetHashmapFieldTypes.tbucket.value == field_type: 2592 # bucket_offset = self.hashmap[item_bucket_index] 2593 # self.buckets[item_info_index] = IList(shared_memory, bucket_offset) 2594 # else: 2595 # raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}') 2596 2597 for item_info_index in range(0, self.capacity * len(SetHashmapItemOffsets), len(SetHashmapItemOffsets)): 2598 field_type_index = item_info_index + SetHashmapItemOffsets.field_type.value 2599 item_hash_index = item_info_index + SetHashmapItemOffsets.field_hash.value 2600 item_bucket_index = item_info_index + SetHashmapItemOffsets.obj_or_bucket.value 2601 field_type = self.hashmap[field_type_index] 2602 if SetHashmapFieldTypes.tnone.value == field_type: 2603 continue 2604 elif 
SetHashmapFieldTypes.tobj.value == field_type: 2605 continue 2606 elif SetHashmapFieldTypes.tbucket.value == field_type: 2607 bucket_offset = self.hashmap[item_bucket_index] 2608 self.buckets[item_info_index] = IList(shared_memory, bucket_offset) 2609 else: 2610 raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
Inherited Members
- collections.abc.Set
- isdisjoint
2688class ISetIterator: 2689 def __init__(self, iset: ISet) -> None: 2690 self._iset = iset 2691 self._index = 0 2692 self._sub_index = 0 2693 2694 def __next__(self): 2695 while self._index < self._iset.capacity: 2696 item_info_index: int = self._index * len(SetHashmapItemOffsets) 2697 field_type_index = item_info_index + SetHashmapItemOffsets.field_type.value 2698 item_hash_index = item_info_index + SetHashmapItemOffsets.field_hash.value 2699 item_bucket_index = item_info_index + SetHashmapItemOffsets.obj_or_bucket.value 2700 field_type = self._iset.hashmap[field_type_index] 2701 if SetHashmapFieldTypes.tnone.value == field_type: 2702 self._index += 1 2703 continue 2704 elif SetHashmapFieldTypes.tobj.value == field_type: 2705 result = self._iset.hashmap[item_bucket_index] 2706 self._index += 1 2707 break 2708 elif SetHashmapFieldTypes.tbucket.value == field_type: 2709 bucket = self._iset.buckets[item_info_index] 2710 sub_item_info_index = self._sub_index 2711 sub_item_hash_index = sub_item_info_index * len(SetBucketOffsets) + SetBucketOffsets.field_hash.value 2712 sub_item_obj_index = sub_item_info_index * len(SetBucketOffsets) + SetBucketOffsets.obj.value 2713 if (sub_item_info_index * len(SetBucketOffsets)) >= len(bucket): 2714 self._sub_index = 0 2715 self._index += 1 2716 continue 2717 2718 result = bucket[sub_item_obj_index] 2719 self._sub_index += 1 2720 break 2721 else: 2722 raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}') 2723 else: 2724 raise StopIteration 2725 2726 return result 2727 2728 def __iter__(self): 2729 return self
class TSet:
    """Codec for immutable sets: maps a ``set``/``frozenset`` into shared memory
    as an ``ISet`` and reconstructs/destroys it from a stored offset.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: set) -> Tuple[AbsSet, Offset, Size]:
        """Serialize ``obj`` into ``shared_memory``; return (wrapper, offset, size)."""
        mapped: ISet = ISet(shared_memory, obj=obj)
        return mapped, mapped._offset, mapped._obj_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> ISet:
        """Attach to an already-stored set at ``offset``.

        Raises WrongObjectTypeError when the stored type tag is not ``tset``.
        """
        stored_type_tag = read_uint64(shared_memory.base_address, offset)
        if stored_type_tag != ObjectType.tset:
            raise WrongObjectTypeError

        return ISet(shared_memory, offset)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the shared-memory storage of the set at ``offset``.

        Raises WrongObjectTypeError when the stored type tag is not ``tset``.
        """
        stored_type_tag = read_uint64(shared_memory.base_address, offset)
        if stored_type_tag != ObjectType.tset:
            raise WrongObjectTypeError

        ISet(shared_memory, offset)._free_mem()
class MutableSetOffsets(IntEnum):
    """Header-field slot indices of an ``IMutableSet`` object.

    Each index is multiplied by the word stride ``bs`` to compute the byte
    offset of the field, right after the common ``BaseObjOffsets`` header.
    """

    size = 0             # current number of stored items (read/written with read_/write_uint64)
    capacity = 1         # number of hashmap slots
    hashmap_offset = 2   # shared-memory offset of the backing IList hashmap
    refresh_counter = 3  # bumped on structural changes so other handles re-read state
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class MutableSetHashmapItemOffsets(IntEnum):
    """Relative field positions inside one hashmap slot of an ``IMutableSet``.

    A slot is a triple of consecutive IList cells; the slot base index is
    ``slot_number * len(MutableSetHashmapItemOffsets)``.
    """

    field_type = 0     # MutableSetHashmapFieldTypes tag: tnone / tobj / tbucket
    field_hash = 1     # hash of the stored object (meaningful for tobj slots)
    obj_or_bucket = 2  # the object itself (tobj) or the overflow-bucket IList offset (tbucket)
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class IMutableSet(BaseIObject, AbsMutableSet):
    """Mutable set stored in shared memory.

    Layout: the common object header (``BaseObjOffsets``) followed by the four
    ``MutableSetOffsets`` header words (size, capacity, hashmap offset,
    refresh counter).  Items live in an ``IList`` hashmap of
    ``capacity * len(MutableSetHashmapItemOffsets)`` cells; each logical slot
    is a (field_type, field_hash, obj_or_bucket) triple.  Hash collisions
    overflow into per-slot ``IList`` buckets of (field_type, field_hash, obj)
    triples, cached process-locally in ``self.buckets``.

    Cross-handle coherency: every structural change bumps the shared
    ``refresh_counter`` word; ``_check_hashmap`` compares it with the locally
    cached value and re-reads all state via ``_refresh_hashmap`` on mismatch.
    """

    # NOTE(review): '_offset__refresh_counter_offset' is assigned in __init__
    # but is absent from __slots__ — this only works if a base class provides
    # a __dict__; confirm against BaseIObject.
    __slots__ = ('_shared_memory', '_base_address', '_obj_size', '_offset', '_offset__data', '_offset__size_offset', '_offset__capacity_offset', '_offset__hashmap_offset', '_load_factor', '_load_factor_2', '_hash_bits', '_capacity', '_min_capacity', '_size', 'hashmap', '_refresh_counter', 'hashmap_offset', 'buckets', 'ignore_rehash')

    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMutableSet = None) -> None:
        """Create a new shared set (``offset is None``) or attach to an
        existing one located at ``offset``.

        Args:
            shared_memory: owning shared-memory arena.
            offset: offset of an already-stored ``tmutableset`` object to attach to.
            obj: optional initial content for a newly created set.
        """
        self._shared_memory = shared_memory
        self._base_address = shared_memory.base_address
        self._obj_size = None
        self._offset: Offset = None
        self._offset__data: Offset = None
        self._offset__size_offset: Offset = None
        self._offset__capacity_offset: Offset = None
        self._offset__hashmap_offset: Offset = None
        self._offset__refresh_counter_offset: Offset = None
        self._load_factor = 0.75      # grow sizing: capacity = ceil(len / 0.75)
        self._load_factor_2 = 0.5625  # shrink threshold factor (0.75 ** 2)
        self._hash_bits: int = None
        self._capacity: int = None
        self._min_capacity: int = None
        self._size: int = None
        self.hashmap: IList = None
        self._refresh_counter: int = 0
        self.hashmap_offset: Offset = None
        self.buckets: Dict[int, IList] = dict()  # hashmap slot base index -> cached bucket IList

        # Suppress rehash/refresh machinery while the object is under construction.
        self.ignore_rehash: bool = True

        if offset is None:
            # Creating a brand-new set inside the shared memory.
            if obj is None:
                # obj = frozenset(set())
                data_len = 16  # default initial sizing when no seed object is given
            else:
                data_len = len(obj)

            self._size = 0
            self.hash_bits = 1
            self.capacity = int(ceil(data_len / self._load_factor))
            self._min_capacity = int(ceil(self._capacity * self._load_factor_2))

            offset, self._obj_size = shared_memory.malloc(ObjectType.tmutableset, bs * len(MutableSetOffsets))
            try:
                self._offset = offset
                offset__data = offset + bs * len(BaseObjOffsets)
                self._offset__data = offset__data
                self._offset__size_offset: Offset = offset__data + bs * MutableSetOffsets.size.value
                self._offset__capacity_offset: Offset = offset__data + bs * MutableSetOffsets.capacity.value
                self._offset__hashmap_offset = offset__data + bs * MutableSetOffsets.hashmap_offset.value
                self._offset__refresh_counter_offset = offset__data + bs * MutableSetOffsets.refresh_counter.value

                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)
                write_uint64(shared_memory.base_address, self._offset__refresh_counter_offset, self._refresh_counter)

                # Allocate the hashmap as a zero-filled IList of capacity * 3 cells.
                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
                self.hashmap = cast(IList, self.hashmap)
                self.hashmap_offset = hashmap_offset
                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
                hashmap_capacity = self.capacity * len(MutableSetHashmapItemOffsets)
                self.hashmap.set_capacity(hashmap_capacity)
                self.hashmap.extend_with(hashmap_capacity, 0)
                hash_bits: int = self.hash_bits  # NOTE(review): local appears unused
                if obj is None:
                    pass
                elif isinstance(obj, IMutableSet):
                    # Steal items from another shared set without re-serializing them.
                    self._move_from(obj)
                else:
                    for item in obj:
                        self.add(item)

                self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset)

                self.ignore_rehash = False
            except:  # noqa: E722 — best-effort cleanup; the original error is re-raised
                self._free_mem()
                raise
        else:
            # Attaching to an existing set: read all state back from shared memory.
            self._refresh_hashmap(offset)
            self.ignore_rehash = False
            # (removed: large block of commented-out legacy attach code,
            #  superseded by _refresh_hashmap())

    def _refresh_hashmap(self, offset: Offset):
        """Re-read the whole object state (header words, hashmap, bucket cache)
        from shared memory at ``offset``.  Used on attach and whenever
        ``_check_hashmap`` detects a refresh-counter mismatch.
        """
        # ignore_rehash = self.ignore_rehash
        # self.ignore_rehash = True

        # Drop all locally cached state before re-reading.
        self._hash_bits = None
        self._capacity = None
        self._min_capacity = None
        self._size = None
        self.hashmap = None
        self._refresh_counter = 0
        self.hashmap_offset = None
        self.buckets = dict()

        shared_memory = self._shared_memory
        self._offset = offset
        offset__data = offset + bs * len(BaseObjOffsets)
        self._offset__data = offset__data
        # NOTE: relies on IntEnum members participating in arithmetic directly
        # (no .value) — equivalent to the explicit .value form used in __init__.
        self._offset__size_offset: Offset = offset__data + bs * MutableSetOffsets.size
        self._offset__capacity_offset: Offset = offset__data + bs * MutableSetOffsets.capacity
        self._offset__hashmap_offset = offset__data + bs * MutableSetOffsets.hashmap_offset
        self._offset__refresh_counter_offset = offset__data + bs * MutableSetOffsets.refresh_counter.value

        self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset)
        self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
        self.hash_bits = 1
        self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
        hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)
        self._min_capacity = int(ceil(self._capacity * self._load_factor_2))

        self.hashmap_offset = hashmap_offset
        self.hashmap = IList(shared_memory, hashmap_offset)
        item_info_index: int = 0  # value immediately overwritten by the loop below
        # (removed: commented-out earlier variant of the bucket-scan loop)

        # Rebuild the local bucket cache: one IList wrapper per tbucket slot.
        for item_info_index in range(0, self.capacity * len(MutableSetHashmapItemOffsets), len(MutableSetHashmapItemOffsets)):
            field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value
            item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value
            item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value
            field_type = self.hashmap[field_type_index]
            if MutableSetHashmapFieldTypes.tnone.value == field_type:
                continue
            elif MutableSetHashmapFieldTypes.tobj.value == field_type:
                continue
            elif MutableSetHashmapFieldTypes.tbucket.value == field_type:
                bucket_offset = self.hashmap[item_bucket_index]
                self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
            else:
                raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

        # self.ignore_rehash = ignore_rehash

    @property
    def refresh_counter(self):
        """Current shared refresh counter, read directly from shared memory."""
        return read_uint64(self._base_address, self._offset__refresh_counter_offset)

    def _increase_refresh_counter(self):
        """Bump the shared refresh counter so other handles notice the change."""
        if not self.ignore_rehash:
            self._refresh_counter += 1
            write_uint64(self._base_address, self._offset__refresh_counter_offset, self._refresh_counter)

    def _check_hashmap(self):
        """Refresh local state if another handle changed the shared object.

        Returns True when a refresh was performed, False otherwise.
        """
        if self.ignore_rehash:
            return False
        else:
            base_address = self._base_address
            refresh_counter = read_uint64(base_address, self._offset__refresh_counter_offset)
            # hashmap_offset = read_uint64(base_address, self._offset__hashmap_offset)
            # if (self._refresh_counter != refresh_counter) or (self.hashmap_offset != hashmap_offset) or (self._hashmap._offset != hashmap_offset):
            if self._refresh_counter != refresh_counter:
                self._refresh_hashmap(self._offset)
                return True

            return False

    # (removed: commented-out 'hashmap' property/setter pair kept in the
    #  original source; superseded by the plain 'hashmap' attribute)

    def _increase_size(self):
        """Record one more stored item; rehash when outside capacity bounds."""
        self._size += 1
        write_uint64(self._base_address, self._offset__size_offset, self._size)
        # NOTE(review): the 'size < min_capacity' clause also fires here while
        # the set is small (shrink check on a grow path) — confirm thresholds.
        if (self._size > self._capacity) or (self._size < self._min_capacity):
            self._rehash()

    def _decrease_size(self):
        """Record one removed item; rehash when outside capacity bounds."""
        self._size -= 1
        if self._size < 0:
            raise RuntimeError('Size of the set is negative')

        write_uint64(self._base_address, self._offset__size_offset, self._size)
        if (self._size > self._capacity) or (self._size < self._min_capacity):
            self._rehash()

    def _move_from(self, other: 'IMutableSet'):
        """Move items out of ``other`` by raw (hash, type, offset) triples,
        avoiding re-serialization of the stored objects.
        """
        for value_hash, value_type, value_offset in other.iter_offset_pop():
            self.add_as_offset(value_hash, value_type, value_offset)

    def _rehash(self):
        """Rebuild the hashmap at the size-appropriate capacity.

        Builds a fresh IMutableSet from self, then swaps the two objects'
        storage (both the Python-side caches and the shared header words) so
        that self keeps its offset but owns the new hashmap, and finally
        destroys the temporary object — which now owns the old hashmap.
        """
        if self.ignore_rehash:
            return

        self._increase_refresh_counter()

        ignore_rehash = self.ignore_rehash  # always False here; restored at the end
        self.ignore_rehash = True

        new_other, new_other_offset, new_other_size = self._shared_memory.put_obj(self)
        new_other = cast(IMutableSet, new_other)

        # Snapshot the new object's state (Python caches + raw header words).
        other_capacity = new_other._capacity
        other_hash_bits = new_other._hash_bits
        other_min_capacity = new_other._min_capacity
        other_size = new_other._size
        # other_refresh_counter = new_other._refresh_counter
        other_hashmap = new_other.hashmap
        other_hashmap_offset = new_other.hashmap_offset
        other_buckets = new_other.buckets
        other_hashmap_offset_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset)
        other_size_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset)
        other_capacity_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__capacity_offset)
        # other_refresh_counter_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__refresh_counter_offset)

        # Hand the OLD storage over to the temporary object...
        new_other._capacity = self._capacity
        new_other._hash_bits = self._hash_bits
        new_other._min_capacity = self._min_capacity
        new_other._size = self._size
        # new_other._refresh_counter = self._refresh_counter
        new_other.hashmap = self.hashmap
        new_other.hashmap_offset = self.hashmap_offset
        new_other.buckets = self.buckets
        write_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset, read_uint64(self._base_address, self._offset__hashmap_offset))
        write_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset, read_uint64(self._base_address, self._offset__size_offset))
        write_uint64(new_other._shared_memory.base_address, new_other._offset__capacity_offset, read_uint64(self._base_address, self._offset__capacity_offset))
        # write_uint64(new_other._shared_memory.base_address, new_other._offset__refresh_counter_offset, read_uint64(self._base_address, self._offset__refresh_counter_offset))

        # ...and take the NEW storage for self.
        self._capacity = other_capacity
        self._hash_bits = other_hash_bits
        self._min_capacity = other_min_capacity
        self._size = other_size
        # self._refresh_counter = other_refresh_counter
        self.hashmap = other_hashmap
        self.hashmap_offset = other_hashmap_offset
        self.buckets = other_buckets
        write_uint64(self._base_address, self._offset__hashmap_offset, other_hashmap_offset_bin)
        write_uint64(self._base_address, self._offset__size_offset, other_size_bin)
        write_uint64(self._base_address, self._offset__capacity_offset, other_capacity_bin)
        # write_uint64(self._base_address, self._offset__refresh_counter_offset, other_refresh_counter_bin)

        # Destroying the temporary object now releases the OLD hashmap/buckets.
        self._shared_memory.destroy_obj(new_other_offset)

        self.ignore_rehash = ignore_rehash

    def __len__(self):
        self._check_hashmap()
        return self._size

    def __iter__(self):
        self._check_hashmap()
        return IMutableSetIterator(self)

    def iter_offset(self):
        """Iterate raw (hash, type, offset) triples without deserializing items."""
        self._check_hashmap()
        return IMutableSetIteratorAsOffset(self)

    def iter_offset_pop(self):
        """Like iter_offset(), but removes each yielded item from the set."""
        self._check_hashmap()
        return IMutableSetIteratorAsOffset(self, True)

    def __contains__(self, obj: Any) -> bool:
        self._check_hashmap()
        item_hash = hash(obj)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MutableSetHashmapItemOffsets)
        field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value
        field_type = self.hashmap[field_type_index]
        if MutableSetHashmapFieldTypes.tnone.value == field_type:
            return False
        elif MutableSetHashmapFieldTypes.tobj.value == field_type:
            # Compare hash first (cheap), then the object itself.
            return (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index])
        elif MutableSetHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                # NOTE(review): the bare `raise` makes the cache-refresh
                # assignment below unreachable; with _check_hashmap() keeping
                # `buckets` in sync this path should never fire — confirm
                # whether the `raise` is a deliberate "can't happen" guard.
                raise
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            for bucket_item_index in range(0, len(bucket), len(MutableSetBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value]
                if MutableSetBucketFieldTypes.tnone.value == bucket_field_type:
                    continue

                bucket_field_hash = bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value]
                bucket_obj = bucket[bucket_item_index + MutableSetBucketOffsets.obj.value]
                if (item_hash == bucket_field_hash) and (obj == bucket_obj):
                    return True

            return False
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def add(self, value):
        """Add an element."""
        self._check_hashmap()
        item = value
        item_hash = hash(item)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MutableSetHashmapItemOffsets)
        field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value
        field_type = self.hashmap[field_type_index]
        if MutableSetHashmapFieldTypes.tnone.value == field_type:
            # Empty slot: store the object inline.
            self.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tobj.value
            self.hashmap[item_hash_index] = item_hash
            self.hashmap[item_bucket_index] = item
            self._increase_size()
            return
        elif MutableSetHashmapFieldTypes.tobj.value == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (item == self.hashmap[item_bucket_index]):
                return  # already present

            # Collision with an inline object: convert the slot to a bucket,
            # move the existing entry into it, then append the new item.
            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(len(MutableSetBucketOffsets))
            bucket.extend_with(len(MutableSetBucketOffsets), 0)
            self.buckets[item_info_index] = bucket
            bucket[MutableSetBucketOffsets.field_type.value] = MutableSetBucketFieldTypes.tobj.value
            self.hashmap.move_item_to_list(item_hash_index, bucket, MutableSetBucketOffsets.field_hash.value)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, MutableSetBucketOffsets.obj.value)
            self.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tbucket.value
            self.hashmap[item_bucket_index] = bucket_offset
            bucket.append(MutableSetBucketFieldTypes.tobj.value)
            bucket.append(item_hash)
            bucket.append(item)
            self._increase_size()
            return
        elif MutableSetHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                # NOTE(review): unreachable refresh below (see __contains__).
                raise
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # First pass: bail out if the item is already in the bucket.
            bucket_len: int = len(bucket)
            for bucket_item_index in range(0, bucket_len, len(MutableSetBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value]
                if MutableSetBucketFieldTypes.tobj.value == bucket_field_type:
                    if (item_hash == bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value]) and (item == bucket[bucket_item_index + MutableSetBucketOffsets.obj.value]):
                        return

            # Second pass: reuse a tnone hole if one exists...
            for bucket_item_index in range(0, bucket_len, len(MutableSetBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value]
                if MutableSetBucketFieldTypes.tnone.value == bucket_field_type:
                    bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value] = MutableSetBucketFieldTypes.tobj.value
                    bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value] = item_hash
                    bucket[bucket_item_index + MutableSetBucketOffsets.obj.value] = item
                    self._increase_size()
                    return
            else:
                # ...otherwise append a new triple at the end of the bucket.
                bucket.append(MutableSetBucketFieldTypes.tobj.value)
                bucket.append(item_hash)
                bucket.append(item)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def add_as_offset(self, value_hash, value_type, value_offset):
        """Add an element."""
        # Same algorithm as add(), but the item is a raw (type, offset) pair
        # manipulated via the *_as_offset IList API, so the stored object is
        # never deserialized.  value_hash must be the original hash(obj).
        self._check_hashmap()
        item = (value_type, value_offset)
        item_hash = value_hash
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MutableSetHashmapItemOffsets)
        field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value
        field_type = self.hashmap[field_type_index]
        if MutableSetHashmapFieldTypes.tnone.value == field_type:
            self.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tobj.value
            self.hashmap[item_hash_index] = item_hash
            self.hashmap.setitem_as_offset(item_bucket_index, item)
            self._increase_size()
            return
        elif MutableSetHashmapFieldTypes.tobj.value == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (item == self.hashmap.getitem_as_offset(item_bucket_index)):
                return

            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(len(MutableSetBucketOffsets))
            bucket.extend_with(len(MutableSetBucketOffsets), 0)
            self.buckets[item_info_index] = bucket
            bucket[MutableSetBucketOffsets.field_type.value] = MutableSetBucketFieldTypes.tobj.value
            self.hashmap.move_item_to_list(item_hash_index, bucket, MutableSetBucketOffsets.field_hash.value)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, MutableSetBucketOffsets.obj.value)
            self.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tbucket.value
            self.hashmap[item_bucket_index] = bucket_offset
            bucket.append(MutableSetBucketFieldTypes.tobj.value)
            bucket.append(item_hash)
            bucket.append_as_offset(item)
            self._increase_size()
            return
        elif MutableSetHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                # NOTE(review): unreachable refresh below (see __contains__).
                raise
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            bucket_len: int = len(bucket)
            for bucket_item_index in range(0, bucket_len, len(MutableSetBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value]
                if MutableSetBucketFieldTypes.tobj.value == bucket_field_type:
                    if (item_hash == bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value]) and (item == bucket.getitem_as_offset(bucket_item_index + MutableSetBucketOffsets.obj.value)):
                        return

            for bucket_item_index in range(0, bucket_len, len(MutableSetBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value]
                if MutableSetBucketFieldTypes.tnone.value == bucket_field_type:
                    bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value] = MutableSetBucketFieldTypes.tobj.value
                    bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value] = item_hash
                    bucket.setitem_as_offset(bucket_item_index + MutableSetBucketOffsets.obj.value, item)
                    self._increase_size()
                    return
            else:
                bucket.append(MutableSetBucketFieldTypes.tobj.value)
                bucket.append(item_hash)
                bucket.append_as_offset(item)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def discard(self, value):
        """Remove an element.  Do not raise an exception if absent."""
        self._check_hashmap()
        obj = value
        item_hash = hash(obj)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MutableSetHashmapItemOffsets)
        field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value
        field_type = self.hashmap[field_type_index]
        if MutableSetHashmapFieldTypes.tnone.value == field_type:
            return
        elif MutableSetHashmapFieldTypes.tobj.value == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index]):
                # Clear the inline slot back to tnone.
                self.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tnone.value
                self.hashmap[item_hash_index] = None
                self.hashmap[item_bucket_index] = None
                self._decrease_size()
                return
            else:
                return
        elif MutableSetHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                # NOTE(review): unreachable refresh below (see __contains__).
                raise
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            for bucket_item_index in range(0, len(bucket), len(MutableSetBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value]
                if MutableSetBucketFieldTypes.tnone.value == bucket_field_type:
                    continue

                bucket_field_hash = bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value]
                bucket_obj = bucket[bucket_item_index + MutableSetBucketOffsets.obj.value]
                if (item_hash == bucket_field_hash) and (obj == bucket_obj):
                    # Punch a tnone hole; the slot may be reused by add().
                    bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value] = MutableSetBucketFieldTypes.tnone.value
                    bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value] = None
                    bucket[bucket_item_index + MutableSetBucketOffsets.obj.value] = None
                    self._decrease_size()
                    return
            return
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    @property
    def hash_bits(self) -> int:
        """Number of low hash bits used to pick a hashmap slot."""
        return self._hash_bits

    @hash_bits.setter
    def hash_bits(self, value: int) -> None:
        # Capacity is always the matching power of two.
        self._hash_bits = value
        self._capacity = 2 ** value

    @property
    def capacity(self) -> int:
        return self._capacity

    @capacity.setter
    def capacity(self, value: int) -> None:
        # Grows only: rounds up to the next power of two via hash_bits.
        if value <= self._capacity:
            return

        if value <= 2:
            self.hash_bits = 1
        else:
            self.hash_bits = int(ceil(log2(value)))

    def __str__(self) -> str:
        self._check_hashmap()
        return set(self).__str__()

    def __repr__(self) -> str:
        self._check_hashmap()
        return set(self).__repr__()

    def _free_mem(self):
        """Release all shared-memory storage: buckets, hashmap, then the header."""
        if self._offset is not None:
            for _, bucket in self.buckets.items():
                self._shared_memory.destroy_obj(bucket._offset)

            self.buckets.clear()
            if self.hashmap_offset is not None:
                self._shared_memory.destroy_obj(self.hashmap_offset)
                self.hashmap_offset = None

            self._shared_memory.free(self._offset)
            self._offset = None
A mutable set is a finite, iterable container.
This class provides concrete generic implementations of all methods except for __contains__, __iter__, __len__, add(), and discard().
To override the comparisons (presumably for speed, as the semantics are fixed), all you have to do is redefine __le__ and then the other operations will automatically follow suit.
    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMutableSet = None) -> None:
        """Init a mutable set stored in shared memory.

        Two construction modes:
        * ``offset is None`` — allocate a fresh structure in ``shared_memory``
          and optionally pre-fill it from ``obj``;
        * ``offset`` given — adopt an already existing structure at that
          offset via ``_refresh_hashmap``.

        On any failure during fresh construction, everything allocated so far
        is released via ``_free_mem`` before the exception is re-raised.
        """
        self._shared_memory = shared_memory
        self._base_address = shared_memory.base_address
        self._obj_size = None
        self._offset: Offset = None
        self._offset__data: Offset = None
        self._offset__size_offset: Offset = None
        self._offset__capacity_offset: Offset = None
        self._offset__hashmap_offset: Offset = None
        self._offset__refresh_counter_offset: Offset = None
        self._load_factor = 0.75    # grow threshold (size / capacity)
        self._load_factor_2 = 0.5625  # shrink threshold; equals load_factor ** 2
        self._hash_bits: int = None
        self._capacity: int = None
        self._min_capacity: int = None
        self._size: int = None
        self.hashmap: IList = None
        self._refresh_counter: int = 0
        self.hashmap_offset: Offset = None
        # Local cache: hashmap slot index -> collision-bucket IList wrapper.
        self.buckets: Dict[int, IList] = dict()

        # Suppress rehashing while the structure is under construction.
        self.ignore_rehash: bool = True

        if offset is None:
            # Fresh allocation.
            if obj is None:
                # obj = frozenset(set())
                data_len = 16  # default initial sizing when no source object is given
            else:
                data_len = len(obj)

            self._size = 0
            self.hash_bits = 1  # property setter: also derives self._capacity
            self.capacity = int(ceil(data_len / self._load_factor))
            self._min_capacity = int(ceil(self._capacity * self._load_factor_2))

            offset, self._obj_size = shared_memory.malloc(ObjectType.tmutableset, bs * len(MutableSetOffsets))
            try:
                self._offset = offset
                # Data area starts right after the common object header.
                offset__data = offset + bs * len(BaseObjOffsets)
                self._offset__data = offset__data
                self._offset__size_offset: Offset = offset__data + bs * MutableSetOffsets.size.value
                self._offset__capacity_offset: Offset = offset__data + bs * MutableSetOffsets.capacity.value
                self._offset__hashmap_offset = offset__data + bs * MutableSetOffsets.hashmap_offset.value
                self._offset__refresh_counter_offset = offset__data + bs * MutableSetOffsets.refresh_counter.value

                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)
                write_uint64(shared_memory.base_address, self._offset__refresh_counter_offset, self._refresh_counter)

                # The hashmap is a flat IList: len(MutableSetHashmapItemOffsets)
                # consecutive slots per logical entry.
                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
                self.hashmap = cast(IList, self.hashmap)
                self.hashmap_offset = hashmap_offset
                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
                hashmap_capacity = self.capacity * len(MutableSetHashmapItemOffsets)
                self.hashmap.set_capacity(hashmap_capacity)
                self.hashmap.extend_with(hashmap_capacity, 0)
                hash_bits: int = self.hash_bits
                if obj is None:
                    pass
                elif isinstance(obj, IMutableSet):
                    # Source already lives in shared memory: move entries wholesale.
                    self._move_from(obj)
                else:
                    for item in obj:
                        self.add(item)

                # Re-read the counter: add()/_move_from may have bumped it.
                self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset)

                self.ignore_rehash = False
            except:
                # Construction failed part-way: release everything allocated so far.
                self._free_mem()
                raise
        else:
            # Adopt an already existing structure at the given offset.
            self._refresh_hashmap(offset)
            self.ignore_rehash = False
3139 def add(self, value): 3140 """Add an element.""" 3141 self._check_hashmap() 3142 item = value 3143 item_hash = hash(item) 3144 item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MutableSetHashmapItemOffsets) 3145 field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value 3146 item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value 3147 item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value 3148 field_type = self.hashmap[field_type_index] 3149 if MutableSetHashmapFieldTypes.tnone.value == field_type: 3150 self.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tobj.value 3151 self.hashmap[item_hash_index] = item_hash 3152 self.hashmap[item_bucket_index] = item 3153 self._increase_size() 3154 return 3155 elif MutableSetHashmapFieldTypes.tobj.value == field_type: 3156 if (item_hash == self.hashmap[item_hash_index]) and (item == self.hashmap[item_bucket_index]): 3157 return 3158 3159 self._increase_refresh_counter() 3160 bucket, bucket_offset, _ = self._shared_memory.put_obj(list()) 3161 bucket = cast(IList, bucket) 3162 bucket.set_capacity(len(MutableSetBucketOffsets)) 3163 bucket.extend_with(len(MutableSetBucketOffsets), 0) 3164 self.buckets[item_info_index] = bucket 3165 bucket[MutableSetBucketOffsets.field_type.value] = MutableSetBucketFieldTypes.tobj.value 3166 self.hashmap.move_item_to_list(item_hash_index, bucket, MutableSetBucketOffsets.field_hash.value) 3167 self.hashmap.move_item_to_list(item_bucket_index, bucket, MutableSetBucketOffsets.obj.value) 3168 self.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tbucket.value 3169 self.hashmap[item_bucket_index] = bucket_offset 3170 bucket.append(MutableSetBucketFieldTypes.tobj.value) 3171 bucket.append(item_hash) 3172 bucket.append(item) 3173 self._increase_size() 3174 return 3175 elif MutableSetHashmapFieldTypes.tbucket.value == field_type: 3176 bucket_offset = 
self.hashmap[item_bucket_index] 3177 try: 3178 bucket = self.buckets[item_info_index] 3179 if bucket._offset != bucket_offset: 3180 raise KeyError 3181 except KeyError: 3182 raise 3183 self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset) 3184 3185 bucket_len: int = len(bucket) 3186 for bucket_item_index in range(0, bucket_len, len(MutableSetBucketOffsets)): 3187 bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value] 3188 if MutableSetBucketFieldTypes.tobj.value == bucket_field_type: 3189 if (item_hash == bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value]) and (item == bucket[bucket_item_index + MutableSetBucketOffsets.obj.value]): 3190 return 3191 3192 for bucket_item_index in range(0, bucket_len, len(MutableSetBucketOffsets)): 3193 bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value] 3194 if MutableSetBucketFieldTypes.tnone.value == bucket_field_type: 3195 bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value] = MutableSetBucketFieldTypes.tobj.value 3196 bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value] = item_hash 3197 bucket[bucket_item_index + MutableSetBucketOffsets.obj.value] = item 3198 self._increase_size() 3199 return 3200 else: 3201 bucket.append(MutableSetBucketFieldTypes.tobj.value) 3202 bucket.append(item_hash) 3203 bucket.append(item) 3204 self._increase_size() 3205 return 3206 else: 3207 raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
Add an element.
3209 def add_as_offset(self, value_hash, value_type, value_offset): 3210 """Add an element.""" 3211 self._check_hashmap() 3212 item = (value_type, value_offset) 3213 item_hash = value_hash 3214 item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MutableSetHashmapItemOffsets) 3215 field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value 3216 item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value 3217 item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value 3218 field_type = self.hashmap[field_type_index] 3219 if MutableSetHashmapFieldTypes.tnone.value == field_type: 3220 self.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tobj.value 3221 self.hashmap[item_hash_index] = item_hash 3222 self.hashmap.setitem_as_offset(item_bucket_index, item) 3223 self._increase_size() 3224 return 3225 elif MutableSetHashmapFieldTypes.tobj.value == field_type: 3226 if (item_hash == self.hashmap[item_hash_index]) and (item == self.hashmap.getitem_as_offset(item_bucket_index)): 3227 return 3228 3229 self._increase_refresh_counter() 3230 bucket, bucket_offset, _ = self._shared_memory.put_obj(list()) 3231 bucket = cast(IList, bucket) 3232 bucket.set_capacity(len(MutableSetBucketOffsets)) 3233 bucket.extend_with(len(MutableSetBucketOffsets), 0) 3234 self.buckets[item_info_index] = bucket 3235 bucket[MutableSetBucketOffsets.field_type.value] = MutableSetBucketFieldTypes.tobj.value 3236 self.hashmap.move_item_to_list(item_hash_index, bucket, MutableSetBucketOffsets.field_hash.value) 3237 self.hashmap.move_item_to_list(item_bucket_index, bucket, MutableSetBucketOffsets.obj.value) 3238 self.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tbucket.value 3239 self.hashmap[item_bucket_index] = bucket_offset 3240 bucket.append(MutableSetBucketFieldTypes.tobj.value) 3241 bucket.append(item_hash) 3242 bucket.append_as_offset(item) 3243 self._increase_size() 3244 return 
3245 elif MutableSetHashmapFieldTypes.tbucket.value == field_type: 3246 bucket_offset = self.hashmap[item_bucket_index] 3247 try: 3248 bucket = self.buckets[item_info_index] 3249 if bucket._offset != bucket_offset: 3250 raise KeyError 3251 except KeyError: 3252 raise 3253 self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset) 3254 3255 bucket_len: int = len(bucket) 3256 for bucket_item_index in range(0, bucket_len, len(MutableSetBucketOffsets)): 3257 bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value] 3258 if MutableSetBucketFieldTypes.tobj.value == bucket_field_type: 3259 if (item_hash == bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value]) and (item == bucket.getitem_as_offset(bucket_item_index + MutableSetBucketOffsets.obj.value)): 3260 return 3261 3262 for bucket_item_index in range(0, bucket_len, len(MutableSetBucketOffsets)): 3263 bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value] 3264 if MutableSetBucketFieldTypes.tnone.value == bucket_field_type: 3265 bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value] = MutableSetBucketFieldTypes.tobj.value 3266 bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value] = item_hash 3267 bucket.setitem_as_offset(bucket_item_index + MutableSetBucketOffsets.obj.value, item) 3268 self._increase_size() 3269 return 3270 else: 3271 bucket.append(MutableSetBucketFieldTypes.tobj.value) 3272 bucket.append(item_hash) 3273 bucket.append_as_offset(item) 3274 self._increase_size() 3275 return 3276 else: 3277 raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
Add an element.
3279 def discard(self, value): 3280 """Remove an element. Do not raise an exception if absent.""" 3281 self._check_hashmap() 3282 obj = value 3283 item_hash = hash(obj) 3284 item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MutableSetHashmapItemOffsets) 3285 field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value 3286 item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value 3287 item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value 3288 field_type = self.hashmap[field_type_index] 3289 if MutableSetHashmapFieldTypes.tnone.value == field_type: 3290 return 3291 elif MutableSetHashmapFieldTypes.tobj.value == field_type: 3292 if (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index]): 3293 self.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tnone.value 3294 self.hashmap[item_hash_index] = None 3295 self.hashmap[item_bucket_index] = None 3296 self._decrease_size() 3297 return 3298 else: 3299 return 3300 elif MutableSetHashmapFieldTypes.tbucket.value == field_type: 3301 bucket_offset = self.hashmap[item_bucket_index] 3302 try: 3303 bucket = self.buckets[item_info_index] 3304 if bucket._offset != bucket_offset: 3305 raise KeyError 3306 except KeyError: 3307 raise 3308 self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset) 3309 3310 for bucket_item_index in range(0, len(bucket), len(MutableSetBucketOffsets)): 3311 bucket_field_type = bucket[bucket_item_index + MutableSetBucketOffsets.field_type.value] 3312 if MutableSetBucketFieldTypes.tnone.value == bucket_field_type: 3313 continue 3314 3315 bucket_field_hash = bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value] 3316 bucket_obj = bucket[bucket_item_index + MutableSetBucketOffsets.obj.value] 3317 if (item_hash == bucket_field_hash) and (obj == bucket_obj): 3318 bucket[bucket_item_index + 
MutableSetBucketOffsets.field_type.value] = MutableSetBucketFieldTypes.tnone.value 3319 bucket[bucket_item_index + MutableSetBucketOffsets.field_hash.value] = None 3320 bucket[bucket_item_index + MutableSetBucketOffsets.obj.value] = None 3321 self._decrease_size() 3322 return 3323 return 3324 else: 3325 raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
Remove an element. Do not raise an exception if absent.
Inherited Members
- collections.abc.MutableSet
- remove
- pop
- clear
- collections.abc.Set
- isdisjoint
class IMutableSetIterator:
    """Iterator over the live objects of an :class:`IMutableSet`.

    ``_index`` walks hashmap entries; ``_sub_index`` walks entries of the
    current collision bucket (in bucket-entry units, not raw slots).
    """

    def __init__(self, iset: IMutableSet) -> None:
        self._iset = iset
        self._index = 0
        self._sub_index = 0

    def __next__(self):
        if self._iset._check_hashmap():
            # BUGFIX (message): was "Sets's" — matches the wording used by
            # IMutableSetIteratorAsOffset.
            raise RuntimeError("Set's hashmap changed during iteration")

        while self._index < self._iset.capacity:
            item_info_index: int = self._index * len(MutableSetHashmapItemOffsets)
            field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value
            item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value
            field_type = self._iset.hashmap[field_type_index]
            if MutableSetHashmapFieldTypes.tnone.value == field_type:
                self._index += 1
                continue
            elif MutableSetHashmapFieldTypes.tobj.value == field_type:
                result = self._iset.hashmap[item_bucket_index]
                self._index += 1
                return result
            elif MutableSetHashmapFieldTypes.tbucket.value == field_type:
                bucket_offset = self._iset.hashmap[item_bucket_index]
                # BUGFIX: the cached-bucket refresh was unreachable — the
                # `except KeyError:` handler re-raised before executing it.
                try:
                    bucket = self._iset.buckets[item_info_index]
                    if bucket._offset != bucket_offset:
                        raise KeyError
                except KeyError:
                    self._iset.buckets[item_info_index] = bucket = IList(self._iset._shared_memory, bucket_offset)

                bucket_len = len(bucket)
                sub_item_info_index = self._sub_index
                while (sub_item_info_index * len(MutableSetBucketOffsets)) < bucket_len:
                    sub_item_field_type_index = sub_item_info_index * len(MutableSetBucketOffsets) + MutableSetBucketOffsets.field_type.value
                    if bucket[sub_item_field_type_index] == MutableSetBucketFieldTypes.tnone.value:
                        sub_item_info_index += 1
                        continue

                    sub_item_obj_index = sub_item_info_index * len(MutableSetBucketOffsets) + MutableSetBucketOffsets.obj.value
                    result = bucket[sub_item_obj_index]
                    # BUGFIX: was `self._sub_index += 1`, which ignored tnone
                    # holes skipped above and made the next call re-yield the
                    # same entry. Resume right after the entry just yielded.
                    self._sub_index = sub_item_info_index + 1
                    return result
                else:
                    # Bucket exhausted: move on to the next hashmap entry.
                    self._sub_index = 0
                    self._index += 1
                    continue
            else:
                raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        else:
            raise StopIteration

    def __iter__(self):
        return self
class IMutableSetIteratorAsOffset:
    """Iterator over an :class:`IMutableSet` yielding raw
    ``(hash, value_type, value_offset)`` triples; with ``pop=True`` each
    yielded entry is also cleared from the set's storage.
    """

    def __init__(self, iset: IMutableSet, pop: bool = False) -> None:
        self._iset = iset
        self._pop: bool = pop
        self._index = 0
        self._sub_index = 0

    def __next__(self):
        if self._iset._check_hashmap():
            raise RuntimeError("Set's hashmap changed during iteration")

        # Simplified: the original wrapped this loop in a redundant
        # `if self._index < capacity:` whose else-branch duplicated the
        # loop's own StopIteration.
        while self._index < self._iset.capacity:
            item_info_index: int = self._index * len(MutableSetHashmapItemOffsets)
            field_type_index = item_info_index + MutableSetHashmapItemOffsets.field_type.value
            item_hash_index = item_info_index + MutableSetHashmapItemOffsets.field_hash.value
            item_bucket_index = item_info_index + MutableSetHashmapItemOffsets.obj_or_bucket.value
            field_type = self._iset.hashmap[field_type_index]
            if MutableSetHashmapFieldTypes.tnone.value == field_type:
                self._index += 1
                continue
            elif MutableSetHashmapFieldTypes.tobj.value == field_type:
                item_hash = self._iset.hashmap[item_hash_index]
                value_type, value_offset = self._iset.hashmap.getitem_as_offset(item_bucket_index)
                if self._pop:
                    self._iset.hashmap[field_type_index] = MutableSetHashmapFieldTypes.tnone.value
                    self._iset.hashmap[item_hash_index] = None
                    self._iset.hashmap.setitem_as_offset(item_bucket_index, (InternalListFieldTypes.tnone.value, 0), False)

                self._index += 1
                return (item_hash, value_type, value_offset)
            elif MutableSetHashmapFieldTypes.tbucket.value == field_type:
                bucket_offset = self._iset.hashmap[item_bucket_index]
                # BUGFIX: the cached-bucket refresh was unreachable — the
                # `except KeyError:` handler re-raised before executing it.
                try:
                    bucket = self._iset.buckets[item_info_index]
                    if bucket._offset != bucket_offset:
                        raise KeyError
                except KeyError:
                    self._iset.buckets[item_info_index] = bucket = IList(self._iset._shared_memory, bucket_offset)

                bucket_len = len(bucket)
                sub_item_info_index = self._sub_index
                while (sub_item_info_index * len(MutableSetBucketOffsets)) < bucket_len:
                    sub_item_field_type_index = sub_item_info_index * len(MutableSetBucketOffsets) + MutableSetBucketOffsets.field_type.value
                    if bucket[sub_item_field_type_index] == MutableSetBucketFieldTypes.tnone.value:
                        sub_item_info_index += 1
                        continue

                    sub_item_hash_index = sub_item_info_index * len(MutableSetBucketOffsets) + MutableSetBucketOffsets.field_hash.value
                    sub_item_obj_index = sub_item_info_index * len(MutableSetBucketOffsets) + MutableSetBucketOffsets.obj.value
                    sub_item_hash = bucket[sub_item_hash_index]
                    sub_item_value_type, sub_item_value_offset = bucket.getitem_as_offset(sub_item_obj_index)
                    if self._pop:
                        # NOTE(review): writes MutableSetHashmapFieldTypes.tnone
                        # into a *bucket* field-type slot (sibling code uses
                        # MutableSetBucketFieldTypes.tnone); kept as-is —
                        # presumably both enums share the value. Confirm.
                        bucket[sub_item_field_type_index] = MutableSetHashmapFieldTypes.tnone.value
                        bucket[sub_item_hash_index] = None
                        bucket.setitem_as_offset(sub_item_obj_index, (InternalListFieldTypes.tnone.value, 0), False)

                    # BUGFIX: was `self._sub_index += 1`, which ignored tnone
                    # holes skipped above and made the next call re-yield the
                    # same entry. Resume right after the entry just yielded.
                    self._sub_index = sub_item_info_index + 1
                    return (sub_item_hash, sub_item_value_type, sub_item_value_offset)
                else:
                    self._sub_index = 0
                    self._index += 1
                    continue
            else:
                raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        else:
            raise StopIteration

    def __iter__(self):
        return self
3506class TMutableSet: 3507 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: set) -> Tuple[IMutableSet, Offset, Size]: 3508 obj: IMutableSet = IMutableSet(shared_memory, obj=obj) 3509 return obj, obj._offset, obj._obj_size 3510 3511 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> IMutableSet: 3512 if ObjectType.tmutableset != read_uint64(shared_memory.base_address, offset): 3513 raise WrongObjectTypeError 3514 3515 return IMutableSet(shared_memory, offset) 3516 3517 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 3518 if ObjectType.tmutableset != read_uint64(shared_memory.base_address, offset): 3519 raise WrongObjectTypeError 3520 3521 obj: IMutableSet = IMutableSet(shared_memory, offset) 3522 obj._free_mem()
Inherited Members
- builtins.dict
- get
- setdefault
- pop
- popitem
- keys
- items
- values
- update
- fromkeys
- clear
- copy
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
3550class MappingHashmapItemOffsets(IntEnum): 3551 field_type = 0 3552 field_hash = 1 3553 key_or_bucket = 2 3554 value_or_none = 3
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class IMapping(BaseIObject, AbsMapping):
    """Read-only mapping (dict-like) view stored in shared memory.

    Layout mirrors IMutableSet: a flat hashmap IList of fixed-width entries
    (see MappingHashmapItemOffsets) plus per-slot collision buckets.
    """

    __slots__ = ('_shared_memory', '_base_address', '_obj_size', '_offset', '_offset__data', '_offset__size_offset', '_offset__capacity_offset', '_offset__hashmap_offset', '_offset__refresh_counter_offset', '_load_factor', '_load_factor_2', '_hash_bits', '_capacity', '_min_capacity', '_size', 'hashmap', '_refresh_counter', 'hashmap_offset', 'buckets', 'ignore_rehash')

    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMapping = None) -> None:
        """Allocate a fresh mapping (``offset is None``), optionally filled
        from ``obj``, or adopt an existing structure at ``offset``."""
        self._shared_memory = shared_memory
        self._base_address = shared_memory.base_address
        self._obj_size = None
        self._offset: Offset = None
        self._offset__data: Offset = None
        self._offset__size_offset: Offset = None
        self._offset__capacity_offset: Offset = None
        self._offset__hashmap_offset: Offset = None
        self._load_factor = 0.75  # sizing factor: capacity = ceil(len / load_factor)
        self._hash_bits: int = None
        self._capacity: int = None
        self._size: int = None
        self.hashmap: IList = None
        self.hashmap_offset: Offset = None
        # Local cache: hashmap slot index -> collision-bucket IList wrapper.
        self.buckets: Dict[int, IList] = dict()

        if offset is None:
            # Fresh allocation.
            if obj is None:
                # obj = frozenset(set())
                data_len = 16  # default initial sizing when no source object is given
            else:
                data_len = len(obj)

            # NOTE(review): when obj is None this records _size = 16 even
            # though nothing is inserted, so len() would report 16 — confirm
            # whether IMapping is ever constructed without obj.
            self._size: int = data_len
            self.hash_bits = 1  # property setter: also derives self._capacity
            self.capacity = int(ceil(data_len / self._load_factor))

            offset, self._obj_size = shared_memory.malloc(ObjectType.tmapping, bs * len(MappingOffsets))
            try:
                self._offset = offset
                # Data area starts right after the common object header.
                offset__data = offset + bs * len(BaseObjOffsets)
                self._offset__data = offset__data
                self._offset__size_offset: Offset = offset__data + bs * MappingOffsets.size.value
                self._offset__capacity_offset: Offset = offset__data + bs * MappingOffsets.capacity.value
                self._offset__hashmap_offset = offset__data + bs * MappingOffsets.hashmap_offset.value

                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)

                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
                self.hashmap = cast(IList, self.hashmap)
                self.hashmap_offset = hashmap_offset
                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
                hashmap_capacity = self.capacity * len(MappingHashmapItemOffsets)
                self.hashmap.set_capacity(hashmap_capacity)
                self.hashmap.extend_with(hashmap_capacity, 0)
                hash_bits: int = self.hash_bits
                if obj is not None:
                    # Insert every (key, value) pair from the source mapping.
                    for key, value in obj.items():
                        key_hash = hash(key)
                        item_info_index: int = mask_least_significant_bits(key_hash, hash_bits) * len(MappingHashmapItemOffsets)
                        field_type_index = item_info_index + MappingHashmapItemOffsets.field_type.value
                        item_hash_index = item_info_index + MappingHashmapItemOffsets.field_hash.value
                        item_bucket_index = item_info_index + MappingHashmapItemOffsets.key_or_bucket.value
                        item_value_index = item_info_index + MappingHashmapItemOffsets.value_or_none.value
                        field_type = self.hashmap[field_type_index]
                        if MappingHashmapFieldTypes.tnone.value == field_type:
                            # Empty slot: store key and value inline.
                            self.hashmap[field_type_index] = MappingHashmapFieldTypes.tobj.value
                            self.hashmap[item_hash_index] = key_hash
                            self.hashmap[item_bucket_index] = key
                            self.hashmap[item_value_index] = value
                        elif MappingHashmapFieldTypes.tobj.value == field_type:
                            # Collision with an inline pair: convert slot to a bucket.
                            bucket, bucket_offset, _ = shared_memory.put_obj(list())
                            bucket = cast(IList, bucket)
                            bucket.set_capacity(len(MappingBucketOffsets))
                            bucket.extend_with(len(MappingBucketOffsets), 0)
                            self.buckets[item_info_index] = bucket
                            self.hashmap.move_item_to_list(item_hash_index, bucket, MappingBucketOffsets.field_hash.value)
                            self.hashmap.move_item_to_list(item_bucket_index, bucket, MappingBucketOffsets.key_obj.value)
                            self.hashmap.move_item_to_list(item_value_index, bucket, MappingBucketOffsets.value_obj.value)
                            self.hashmap[field_type_index] = MappingHashmapFieldTypes.tbucket.value
                            self.hashmap[item_bucket_index] = bucket_offset
                            bucket.append(key_hash)
                            bucket.append(key)
                            bucket.append(value)
                        elif MappingHashmapFieldTypes.tbucket.value == field_type:
                            # Existing bucket: append the pair.
                            bucket = self.buckets[item_info_index]
                            bucket.append(key_hash)
                            bucket.append(key)
                            bucket.append(value)
                        else:
                            raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
            except:
                # Construction failed part-way: release everything allocated so far.
                self._free_mem()
                raise
        else:
            # Adopt an already existing structure at the given offset.
            self._offset = offset
            offset__data = offset + bs * len(BaseObjOffsets)
            self._offset__data = offset__data
            self._offset__size_offset: Offset = offset__data + bs * MappingOffsets.size.value
            self._offset__capacity_offset: Offset = offset__data + bs * MappingOffsets.capacity.value
            self._offset__hashmap_offset = offset__data + bs * MappingOffsets.hashmap_offset.value

            self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
            self.hash_bits = 1
            self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
            hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)

            self.hashmap_offset = hashmap_offset
            self.hashmap = IList(shared_memory, hashmap_offset)
            item_info_index: int = 0
            # Rebuild the local bucket cache from the stored hashmap.
            for item_info_index in range(0, self.capacity * len(MappingHashmapItemOffsets), len(MappingHashmapItemOffsets)):
                field_type_index = item_info_index + MappingHashmapItemOffsets.field_type.value
                item_hash_index = item_info_index + MappingHashmapItemOffsets.field_hash.value
                item_bucket_index = item_info_index + MappingHashmapItemOffsets.key_or_bucket.value
                item_value_index = item_info_index + MappingHashmapItemOffsets.value_or_none.value
                field_type = self.hashmap[field_type_index]
                if MappingHashmapFieldTypes.tnone.value == field_type:
                    continue
                elif MappingHashmapFieldTypes.tobj.value == field_type:
                    continue
                elif MappingHashmapFieldTypes.tbucket.value == field_type:
                    bucket_offset = self.hashmap[item_bucket_index]
                    self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
                else:
                    raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def __len__(self):
        # Number of stored pairs (see NOTE in __init__ about the obj-is-None case).
        return self._size

    def __iter__(self):
        return IMappingIterator(self)

    def __getitem__(self, key: Hashable):
        """Return the value stored for ``key``; raise KeyError if absent."""
        item_hash = hash(key)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MappingHashmapItemOffsets)
        field_type_index = item_info_index + MappingHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MappingHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MappingHashmapItemOffsets.key_or_bucket.value
        item_value_index = item_info_index + MappingHashmapItemOffsets.value_or_none.value
        field_type = self.hashmap[field_type_index]
        if MappingHashmapFieldTypes.tnone.value == field_type:
            raise KeyError
        elif MappingHashmapFieldTypes.tobj.value == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
                return self.hashmap[item_value_index]
            else:
                raise KeyError
        elif MappingHashmapFieldTypes.tbucket.value == field_type:
            bucket = self.buckets[item_info_index]
            # NOTE(review): the range bound multiplies len(bucket) (raw slot
            # count) by len(MappingBucketOffsets) — sibling set code iterates
            # plain range(0, len(bucket), step); this looks like it overshoots
            # unless IList indexing tolerates it. Confirm.
            for sub_item_info_index in range(0, len(bucket) * len(MappingBucketOffsets), len(MappingBucketOffsets)):
                sub_item_hash_index = sub_item_info_index + MappingBucketOffsets.field_hash.value
                sub_item_key_obj_index = sub_item_info_index + MappingBucketOffsets.key_obj.value
                sub_item_value_obj_index = sub_item_info_index + MappingBucketOffsets.value_obj.value
                if (item_hash == bucket[sub_item_hash_index]) and (key == bucket[sub_item_key_obj_index]):
                    return bucket[sub_item_value_obj_index]

            raise KeyError
        else:
            raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    @property
    def hash_bits(self) -> int:
        return self._hash_bits

    @hash_bits.setter
    def hash_bits(self, value: int) -> None:
        # Capacity is always an exact power of two of the hash-bit count.
        self._hash_bits = value
        self._capacity = 2 ** value

    @property
    def capacity(self) -> int:
        return self._capacity

    @capacity.setter
    def capacity(self, value: int) -> None:
        # Grows only; requested capacity is rounded up to a power of two
        # via the hash_bits setter.
        if value <= self._capacity:
            return

        if value <= 2:
            self.hash_bits = 1
        else:
            self.hash_bits = int(ceil(log2(value)))

    def __str__(self) -> str:
        return dict(self).__str__()

    def __repr__(self) -> str:
        return dict(self).__repr__()

    def _free_mem(self):
        # Release buckets, then the hashmap, then the object header itself.
        if self._offset is not None:
            for _, bucket in self.buckets.items():
                self._shared_memory.destroy_obj(bucket._offset)

            self.buckets.clear()
            if self.hashmap_offset is not None:
                self._shared_memory.destroy_obj(self.hashmap_offset)
                self.hashmap_offset = None

            self._shared_memory.free(self._offset)
            self._offset = None
def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMapping = None) -> None:
    """Create a new IMapping in shared memory, or attach to an existing one.

    If ``offset`` is None, a fresh ``tmapping`` object is allocated in the
    shared-memory arena and (optionally) filled from ``obj``.  Otherwise the
    mapping already located at ``offset`` is adopted: its header fields are
    read back and IList wrappers are re-attached to its collision buckets.

    Args:
        shared_memory: owning SharedMemory arena.
        offset: offset of an existing tmapping object to adopt, or None to
            construct a new one.
        obj: source mapping whose items are copied in on construction;
            ignored when adopting.

    Raises:
        ValueError: if a hashmap slot holds an unknown field-type tag.
    """
    self._shared_memory = shared_memory
    self._base_address = shared_memory.base_address
    self._obj_size = None
    self._offset: Offset = None
    self._offset__data: Offset = None
    self._offset__size_offset: Offset = None
    self._offset__capacity_offset: Offset = None
    self._offset__hashmap_offset: Offset = None
    self._load_factor = 0.75  # capacity is sized as ceil(len / load factor)
    self._hash_bits: int = None
    self._capacity: int = None
    self._size: int = None
    self.hashmap: IList = None
    self.hashmap_offset: Offset = None
    self.buckets: Dict[int, IList] = dict()  # item_info_index -> collision bucket (IList)

    if offset is None:
        # Construction path: allocate and populate a new mapping object.
        if obj is None:
            # No source object: pre-size for 16 entries.
            data_len = 16
        else:
            data_len = len(obj)

        self._size: int = data_len
        self.hash_bits = 1  # the hash_bits setter also resets self._capacity
        self.capacity = int(ceil(data_len / self._load_factor))

        offset, self._obj_size = shared_memory.malloc(ObjectType.tmapping, bs * len(MappingOffsets))
        try:
            self._offset = offset
            # Data area starts right after the common object header.
            offset__data = offset + bs * len(BaseObjOffsets)
            self._offset__data = offset__data
            self._offset__size_offset: Offset = offset__data + bs * MappingOffsets.size.value
            self._offset__capacity_offset: Offset = offset__data + bs * MappingOffsets.capacity.value
            self._offset__hashmap_offset = offset__data + bs * MappingOffsets.hashmap_offset.value

            write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
            write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)

            # The hashmap is a flat IList with len(MappingHashmapItemOffsets)
            # slots per item: (field_type, hash, key-or-bucket, value-or-none).
            self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
            self.hashmap = cast(IList, self.hashmap)
            self.hashmap_offset = hashmap_offset
            write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
            hashmap_capacity = self.capacity * len(MappingHashmapItemOffsets)
            self.hashmap.set_capacity(hashmap_capacity)
            self.hashmap.extend_with(hashmap_capacity, 0)
            hash_bits: int = self.hash_bits
            if obj is not None:
                for key, value in obj.items():
                    key_hash = hash(key)
                    # Slot is chosen by the low `hash_bits` bits of the key hash.
                    item_info_index: int = mask_least_significant_bits(key_hash, hash_bits) * len(MappingHashmapItemOffsets)
                    field_type_index = item_info_index + MappingHashmapItemOffsets.field_type.value
                    item_hash_index = item_info_index + MappingHashmapItemOffsets.field_hash.value
                    item_bucket_index = item_info_index + MappingHashmapItemOffsets.key_or_bucket.value
                    item_value_index = item_info_index + MappingHashmapItemOffsets.value_or_none.value
                    field_type = self.hashmap[field_type_index]
                    if MappingHashmapFieldTypes.tnone.value == field_type:
                        # Empty slot: store the entry inline in the hashmap.
                        self.hashmap[field_type_index] = MappingHashmapFieldTypes.tobj.value
                        self.hashmap[item_hash_index] = key_hash
                        self.hashmap[item_bucket_index] = key
                        self.hashmap[item_value_index] = value
                    elif MappingHashmapFieldTypes.tobj.value == field_type:
                        # Collision with an inline entry: demote the slot to a
                        # bucket list, moving the old entry into it first, then
                        # append the new (hash, key, value) triple.
                        bucket, bucket_offset, _ = shared_memory.put_obj(list())
                        bucket = cast(IList, bucket)
                        bucket.set_capacity(len(MappingBucketOffsets))
                        bucket.extend_with(len(MappingBucketOffsets), 0)
                        self.buckets[item_info_index] = bucket
                        self.hashmap.move_item_to_list(item_hash_index, bucket, MappingBucketOffsets.field_hash.value)
                        self.hashmap.move_item_to_list(item_bucket_index, bucket, MappingBucketOffsets.key_obj.value)
                        self.hashmap.move_item_to_list(item_value_index, bucket, MappingBucketOffsets.value_obj.value)
                        self.hashmap[field_type_index] = MappingHashmapFieldTypes.tbucket.value
                        self.hashmap[item_bucket_index] = bucket_offset
                        bucket.append(key_hash)
                        bucket.append(key)
                        bucket.append(value)
                    elif MappingHashmapFieldTypes.tbucket.value == field_type:
                        # Slot already holds a bucket: append the new triple.
                        bucket = self.buckets[item_info_index]
                        bucket.append(key_hash)
                        bucket.append(key)
                        bucket.append(value)
                    else:
                        raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        except:
            # Roll back the partially-constructed object before re-raising.
            self._free_mem()
            raise
    else:
        # Adoption path: read the header of an already-constructed mapping.
        self._offset = offset
        offset__data = offset + bs * len(BaseObjOffsets)
        self._offset__data = offset__data
        self._offset__size_offset: Offset = offset__data + bs * MappingOffsets.size.value
        self._offset__capacity_offset: Offset = offset__data + bs * MappingOffsets.capacity.value
        self._offset__hashmap_offset = offset__data + bs * MappingOffsets.hashmap_offset.value

        self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
        self.hash_bits = 1
        self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
        hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)

        self.hashmap_offset = hashmap_offset
        self.hashmap = IList(shared_memory, hashmap_offset)
        item_info_index: int = 0
        # Re-attach an IList wrapper for every collision bucket found in the
        # hashmap; inline and empty slots need no extra wrapper.
        for item_info_index in range(0, self.capacity * len(MappingHashmapItemOffsets), len(MappingHashmapItemOffsets)):
            field_type_index = item_info_index + MappingHashmapItemOffsets.field_type.value
            item_hash_index = item_info_index + MappingHashmapItemOffsets.field_hash.value
            item_bucket_index = item_info_index + MappingHashmapItemOffsets.key_or_bucket.value
            item_value_index = item_info_index + MappingHashmapItemOffsets.value_or_none.value
            field_type = self.hashmap[field_type_index]
            if MappingHashmapFieldTypes.tnone.value == field_type:
                continue
            elif MappingHashmapFieldTypes.tobj.value == field_type:
                continue
            elif MappingHashmapFieldTypes.tbucket.value == field_type:
                bucket_offset = self.hashmap[item_bucket_index]
                self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
            else:
                raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
Inherited Members
- collections.abc.Mapping
- get
- keys
- items
- values
3818class IMappingIterator: 3819 def __init__(self, imapping: IMapping) -> None: 3820 self._imapping = imapping 3821 self._index = 0 3822 self._sub_index = 0 3823 3824 def __next__(self): 3825 while self._index < self._imapping.capacity: 3826 item_info_index: int = self._index * len(MappingHashmapItemOffsets) 3827 field_type_index = item_info_index + MappingHashmapItemOffsets.field_type.value 3828 item_hash_index = item_info_index + MappingHashmapItemOffsets.field_hash.value 3829 item_bucket_index = item_info_index + MappingHashmapItemOffsets.key_or_bucket.value 3830 item_value_index = item_info_index + MappingHashmapItemOffsets.value_or_none.value 3831 field_type = self._imapping.hashmap[field_type_index] 3832 if MappingHashmapFieldTypes.tnone.value == field_type: 3833 self._index += 1 3834 continue 3835 elif MappingHashmapFieldTypes.tobj.value == field_type: 3836 result = self._imapping.hashmap[item_bucket_index] 3837 self._index += 1 3838 break 3839 elif MappingHashmapFieldTypes.tbucket.value == field_type: 3840 bucket = self._imapping.buckets[item_info_index] 3841 sub_item_info_index = self._sub_index 3842 sub_item_hash_index = sub_item_info_index * len(MappingBucketOffsets) + MappingBucketOffsets.field_hash.value 3843 sub_item_key_obj_index = sub_item_info_index * len(MappingBucketOffsets) + MappingBucketOffsets.key_obj.value 3844 sub_item_value_obj_index = sub_item_info_index * len(MappingBucketOffsets) + MappingBucketOffsets.value_obj.value 3845 if (sub_item_info_index * len(MappingBucketOffsets)) >= len(bucket): 3846 self._sub_index = 0 3847 self._index += 1 3848 continue 3849 3850 result = bucket[sub_item_key_obj_index] 3851 self._sub_index += 1 3852 break 3853 else: 3854 raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}') 3855 else: 3856 raise StopIteration 3857 3858 return result 3859 3860 def __iter__(self): 3861 return self
3864class TMapping: 3865 def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: AbsMapping) -> Tuple[IMapping, Offset, Size]: 3866 obj: IMapping = IMapping(shared_memory, obj=obj) 3867 return obj, obj._offset, obj._obj_size 3868 3869 def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> IMapping: 3870 if ObjectType.tmapping != read_uint64(shared_memory.base_address, offset): 3871 raise WrongObjectTypeError 3872 3873 return IMapping(shared_memory, offset) 3874 3875 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 3876 if ObjectType.tmapping != read_uint64(shared_memory.base_address, offset): 3877 raise WrongObjectTypeError 3878 3879 obj: IMapping = IMapping(shared_memory, offset) 3880 obj._free_mem()
3887class MutableMappingOffsets(IntEnum): 3888 size = 0 3889 capacity = 1 3890 hashmap_offset = 2 3891 refresh_counter = 3
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
3900class MutableMappingHashmapItemOffsets(IntEnum): 3901 field_type = 0 3902 field_hash = 1 3903 key_or_bucket = 2 3904 value_or_none = 3
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
3912class MutableMappingBucketOffsets(IntEnum): 3913 field_type = 0 3914 field_hash = 1 3915 key_obj = 2 3916 value_obj = 3
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
3919class IMutableMapping(BaseIObject, AbsMutableMapping): 3920 __slots__ = ('_shared_memory', '_base_address', '_obj_size', '_offset', '_offset__data', '_offset__size_offset', '_offset__capacity_offset', '_offset__hashmap_offset', '_load_factor', '_load_factor_2', '_hash_bits', '_capacity', '_min_capacity', '_size', 'hashmap', 'hashmap_offset', 'buckets', '_refresh_counter', '_offset__refresh_counter_offset', 'ignore_rehash') 3921 3922 # @property 3923 # def __mro__(self) -> Tuple: 3924 # return BaseIObject, AbsMutableMapping, dict 3925 3926 def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMutableMapping = None) -> None: 3927 self._shared_memory = shared_memory 3928 self._base_address = shared_memory.base_address 3929 self._obj_size = None 3930 self._offset: Offset = None 3931 self._offset__data: Offset = None 3932 self._offset__size_offset: Offset = None 3933 self._offset__capacity_offset: Offset = None 3934 self._offset__hashmap_offset: Offset = None 3935 self._offset__refresh_counter_offset: Offset = None 3936 self._load_factor = 0.75 3937 self._load_factor_2 = 0.5625 3938 self._hash_bits: int = None 3939 self._capacity: int = None 3940 self._min_capacity: int = None 3941 self._size: int = None 3942 self.hashmap: IList = None 3943 self._refresh_counter: int = 0 3944 self.hashmap_offset: Offset = None 3945 self.buckets: Dict[int, IList] = dict() 3946 3947 self.ignore_rehash: bool = True 3948 3949 if offset is None: 3950 if obj is None: 3951 # obj = frozenset(set()) 3952 data_len = 16 3953 else: 3954 data_len = len(obj) 3955 3956 self._size: int = 0 3957 self.hash_bits = 1 3958 self.capacity = int(ceil(data_len / self._load_factor)) 3959 self._min_capacity = int(ceil(self._capacity * self._load_factor_2)) 3960 3961 offset, self._obj_size = shared_memory.malloc(ObjectType.tmutablemapping, bs * len(MutableMappingOffsets)) 3962 created_items_offsets: List[Offset] = list() 3963 try: 3964 self._offset = offset 3965 offset__data = offset 
+ bs * len(BaseObjOffsets) 3966 self._offset__data = offset__data 3967 self._offset__size_offset = offset__data + bs * MutableMappingOffsets.size.value 3968 self._offset__capacity_offset = offset__data + bs * MutableMappingOffsets.capacity.value 3969 self._offset__hashmap_offset = offset__data + bs * MutableMappingOffsets.hashmap_offset.value 3970 self._offset__refresh_counter_offset = offset__data + bs * MutableMappingOffsets.refresh_counter.value 3971 3972 write_uint64(shared_memory.base_address, self._offset__size_offset, self._size) 3973 write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity) 3974 write_uint64(shared_memory.base_address, self._offset__refresh_counter_offset, self._refresh_counter) 3975 3976 self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list()) 3977 self.hashmap = cast(IList, self.hashmap) 3978 self.hashmap_offset = hashmap_offset 3979 write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset) 3980 hashmap_capacity = self.capacity * len(MutableMappingHashmapItemOffsets) 3981 self.hashmap.set_capacity(hashmap_capacity) 3982 self.hashmap.extend_with(hashmap_capacity, 0) 3983 hash_bits: int = self.hash_bits 3984 if obj is None: 3985 pass 3986 elif isinstance(obj, IMutableMapping): 3987 self._move_from(obj) 3988 else: 3989 for key, value in obj.items(): 3990 self.__setitem__(key, value) 3991 3992 self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset) 3993 3994 self.ignore_rehash = False 3995 3996 # print(f'Constructed {self.hashmap=}') 3997 # print(f'\tConstructed buckets:') 3998 # pdi(self.buckets) 3999 # for bucket_index, bucket in self.buckets.items(): 4000 # pdi(bucket) 4001 # print(f'\t\t{bucket_index}:', bucket) 4002 except: 4003 self._free_mem() 4004 raise 4005 else: 4006 self._refresh_hashmap(offset) 4007 self.ignore_rehash = False 4008 4009 # self._offset = offset 4010 # offset__data = offset + bs * len(BaseObjOffsets) 4011 # 
self._offset__data = offset__data 4012 # self._offset__size_offset: Offset = offset__data + bs * MutableMappingOffsets.size.value 4013 # self._offset__capacity_offset: Offset = offset__data + bs * MutableMappingOffsets.capacity.value 4014 # self._offset__hashmap_offset = offset__data + bs * MutableMappingOffsets.hashmap_offset.value 4015 4016 # self._size = read_uint64(shared_memory.base_address, self._offset__size_offset) 4017 # self.hash_bits = 1 4018 # self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset) 4019 # hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset) 4020 # self._min_capacity = int(ceil(self._capacity * self._load_factor_2)) 4021 4022 # self.hashmap_offset = hashmap_offset 4023 # self.hashmap = IList(shared_memory, hashmap_offset) 4024 # # print(f'Adopted by {type(self)}: {self.hashmap=}') 4025 # item_info_index: int = 0 4026 # # for item_info_index in range(self.capacity): 4027 # # field_type_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.field_type.value 4028 # # item_hash_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.field_hash.value 4029 # # item_bucket_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.key_or_bucket.value 4030 # # item_value_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.value_or_none.value 4031 # # field_type = self.hashmap[field_type_index] 4032 # # if MutableMappingHashmapFieldTypes.tnone.value == field_type: 4033 # # continue 4034 # # elif MutableMappingHashmapFieldTypes.tobj.value == field_type: 4035 # # continue 4036 # # elif MutableMappingHashmapFieldTypes.tbucket.value == field_type: 4037 # # bucket_offset = self.hashmap[item_bucket_index] 4038 # # self.buckets[item_info_index] = IList(shared_memory, bucket_offset) 4039 # # else: 4040 # # raise 
ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}') 4041 4042 # for item_info_index in range(0, self.capacity * len(MutableMappingHashmapItemOffsets), len(MutableMappingHashmapItemOffsets)): 4043 # field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value 4044 # item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value 4045 # item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value 4046 # item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value 4047 # field_type = self.hashmap[field_type_index] 4048 # if MutableMappingHashmapFieldTypes.tnone.value == field_type: 4049 # continue 4050 # elif MutableMappingHashmapFieldTypes.tobj.value == field_type: 4051 # continue 4052 # elif MutableMappingHashmapFieldTypes.tbucket.value == field_type: 4053 # bucket_offset = self.hashmap[item_bucket_index] 4054 # self.buckets[item_info_index] = IList(shared_memory, bucket_offset) 4055 # else: 4056 # raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}') 4057 4058 # self.ignore_rehash = False 4059 4060 # # print(f'\tAdopted by {type(self)} buckets:') 4061 # # pdi(self.buckets) 4062 # # for bucket_index, bucket in self.buckets.items(): 4063 # # pdi(bucket) 4064 # # print(f'\t\t{bucket_index}:', bucket) 4065 4066 def _refresh_hashmap(self, offset: Offset): 4067 # print(f'~ refresh_hashmap {offset}: {intro_func_repr_limited()}') 4068 4069 # ignore_rehash = self.ignore_rehash 4070 # self.ignore_rehash = True 4071 4072 self._hash_bits = None 4073 self._capacity = None 4074 self._min_capacity = None 4075 self._size = None 4076 self.hashmap = None 4077 self._refresh_counter = 0 4078 self.hashmap_offset = None 4079 self.buckets = dict() 4080 4081 shared_memory = self._shared_memory 4082 self._offset = offset 4083 offset__data = offset + bs * len(BaseObjOffsets) 4084 
self._offset__data = offset__data 4085 self._offset__size_offset: Offset = offset__data + bs * MutableMappingOffsets.size.value 4086 self._offset__capacity_offset: Offset = offset__data + bs * MutableMappingOffsets.capacity.value 4087 self._offset__hashmap_offset = offset__data + bs * MutableMappingOffsets.hashmap_offset.value 4088 self._offset__refresh_counter_offset = offset__data + bs * MutableMappingOffsets.refresh_counter.value 4089 4090 self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset) 4091 self._size = read_uint64(shared_memory.base_address, self._offset__size_offset) 4092 self.hash_bits = 1 4093 self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset) 4094 hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset) 4095 self._min_capacity = int(ceil(self._capacity * self._load_factor_2)) 4096 4097 self.hashmap_offset = hashmap_offset 4098 self.hashmap = IList(shared_memory, hashmap_offset) 4099 # print(f'Adopted by {type(self)}: {self.hashmap=}') 4100 # item_info_index: int = 0 4101 # for item_info_index in range(self.capacity): 4102 # field_type_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.field_type.value 4103 # item_hash_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.field_hash.value 4104 # item_bucket_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.key_or_bucket.value 4105 # item_value_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.value_or_none.value 4106 # field_type = self.hashmap[field_type_index] 4107 # if MutableMappingHashmapFieldTypes.tnone.value == field_type: 4108 # continue 4109 # elif MutableMappingHashmapFieldTypes.tobj.value == field_type: 4110 # continue 4111 # elif MutableMappingHashmapFieldTypes.tbucket.value == field_type: 
4112 # bucket_offset = self.hashmap[item_bucket_index] 4113 # self.buckets[item_info_index] = IList(shared_memory, bucket_offset) 4114 # else: 4115 # raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}') 4116 4117 for item_info_index in range(0, self.capacity * len(MutableMappingHashmapItemOffsets), len(MutableMappingHashmapItemOffsets)): 4118 field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value 4119 item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value 4120 item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value 4121 item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value 4122 field_type = self.hashmap[field_type_index] 4123 if MutableMappingHashmapFieldTypes.tnone.value == field_type: 4124 continue 4125 elif MutableMappingHashmapFieldTypes.tobj.value == field_type: 4126 continue 4127 elif MutableMappingHashmapFieldTypes.tbucket.value == field_type: 4128 bucket_offset = self.hashmap[item_bucket_index] 4129 self.buckets[item_info_index] = IList(shared_memory, bucket_offset) 4130 else: 4131 raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}') 4132 4133 # self.ignore_rehash = ignore_rehash 4134 4135 @property 4136 def refresh_counter(self): 4137 return read_uint64(self._base_address, self._offset__refresh_counter_offset) 4138 4139 def _increase_refresh_counter(self): 4140 if self.ignore_rehash: 4141 # print(f'~ ignore increase_refresh_counter {self._offset}: {intro_func_repr_limited()}') 4142 pass 4143 else: 4144 # print(f'~ increase_refresh_counter {self._offset}: {intro_func_repr_limited()}') 4145 # refresh_counter = read_uint64(self._base_address, self._offset__refresh_counter_offset) 4146 # if self._refresh_counter != refresh_counter: 4147 # print('~!!! 
increase_refresh_counter') 4148 4149 self._refresh_counter += 1 4150 write_uint64(self._base_address, self._offset__refresh_counter_offset, self._refresh_counter) 4151 4152 def _check_hashmap(self): 4153 if self.ignore_rehash: 4154 # print(f'~ ignore check_hashmap {self._offset}: {intro_func_repr_limited()}') 4155 return False 4156 else: 4157 base_address = self._base_address 4158 refresh_counter = read_uint64(base_address, self._offset__refresh_counter_offset) 4159 # hashmap_offset = read_uint64(base_address, self._offset__hashmap_offset) 4160 # if (self._refresh_counter != refresh_counter) or (self.hashmap_offset != hashmap_offset) or (self._hashmap._offset != hashmap_offset): 4161 if self._refresh_counter != refresh_counter: 4162 # print(f'~ check_hashmap {self._offset}: {intro_func_repr_limited()}') 4163 self._refresh_hashmap(self._offset) 4164 return True 4165 4166 return False 4167 4168 # @property 4169 # def hashmap(self) -> IList: 4170 # if self.ignore_rehash: 4171 # return self._hashmap 4172 # else: 4173 # self._check_hashmap() 4174 # return self._hashmap 4175 4176 # @hashmap.setter 4177 # def hashmap(self, value: IList): 4178 # self._hashmap = value 4179 4180 def _increase_size(self): 4181 self._size += 1 4182 write_uint64(self._base_address, self._offset__size_offset, self._size) 4183 if (self._size > self._capacity) or (self._size < self._min_capacity): 4184 self._rehash() 4185 4186 def _decrease_size(self): 4187 self._size -= 1 4188 if self._size < 0: 4189 raise RuntimeError('Size of the set is negative') 4190 4191 write_uint64(self._base_address, self._offset__size_offset, self._size) 4192 if (self._size > self._capacity) or (self._size < self._min_capacity): 4193 self._rehash() 4194 4195 def _move_from(self, other: 'IMutableMapping'): 4196 for key_hash, key_type, key_offset, value_type, value_offset in other.iter_offset_pop(): 4197 self.setitem_as_offset(key_hash, key_type, key_offset, value_type, value_offset) 4198 4199 def _rehash(self): 4200 if 
self.ignore_rehash: 4201 # print(f'~ ignore rehash {self._offset}: {intro_func_repr_limited()}') 4202 return 4203 4204 # print(f'~ rehash {self._offset}: {intro_func_repr_limited()}') 4205 self._increase_refresh_counter() 4206 4207 ignore_rehash = self.ignore_rehash 4208 self.ignore_rehash = True 4209 4210 new_other, new_other_offset, new_other_size = self._shared_memory.put_obj(self) 4211 new_other = cast(IMutableMapping, new_other) 4212 4213 other_capacity = new_other._capacity 4214 other_hash_bits = new_other._hash_bits 4215 other_min_capacity = new_other._min_capacity 4216 other_size = new_other._size 4217 # refresh_counter = new_other._refresh_counter 4218 other_hashmap = new_other.hashmap 4219 other_hashmap_offset = new_other.hashmap_offset 4220 other_buckets = new_other.buckets 4221 other_hashmap_offset_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset) 4222 other_size_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset) 4223 other_capacity_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__capacity_offset) 4224 # refresh_counter_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__refresh_counter_offset) 4225 4226 new_other._capacity = self._capacity 4227 new_other._hash_bits = self._hash_bits 4228 new_other._min_capacity = self._min_capacity 4229 new_other._size = self._size 4230 # new_other._refresh_counter = self._refresh_counter 4231 new_other.hashmap = self.hashmap 4232 new_other.hashmap_offset = self.hashmap_offset 4233 new_other.buckets = self.buckets 4234 write_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset, read_uint64(self._base_address, self._offset__hashmap_offset)) 4235 write_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset, read_uint64(self._base_address, self._offset__size_offset)) 4236 write_uint64(new_other._shared_memory.base_address, 
new_other._offset__capacity_offset, read_uint64(self._base_address, self._offset__capacity_offset)) 4237 # write_uint64(new_other._shared_memory.base_address, new_other._offset__refresh_counter_offset, read_uint64(self._base_address, self._offset__refresh_counter_offset)) 4238 4239 self._capacity = other_capacity 4240 self._hash_bits = other_hash_bits 4241 self._min_capacity = other_min_capacity 4242 self._size = other_size 4243 # self._refresh_counter = refresh_counter 4244 self.hashmap = other_hashmap 4245 self.hashmap_offset = other_hashmap_offset 4246 self.buckets = other_buckets 4247 write_uint64(self._base_address, self._offset__hashmap_offset, other_hashmap_offset_bin) 4248 write_uint64(self._base_address, self._offset__size_offset, other_size_bin) 4249 write_uint64(self._base_address, self._offset__capacity_offset, other_capacity_bin) 4250 # write_uint64(self._base_address, self._offset__refresh_counter_offset, refresh_counter_bin) 4251 4252 self._shared_memory.destroy_obj(new_other_offset) 4253 4254 self.ignore_rehash = ignore_rehash 4255 4256 def __len__(self): 4257 self._check_hashmap() 4258 return self._size 4259 4260 def __iter__(self): 4261 self._check_hashmap() 4262 return IMutableMappingIterator(self) 4263 4264 def iter_offset(self): 4265 self._check_hashmap() 4266 return IMutableMappingIteratorAsOffset(self) 4267 4268 def iter_offset_pop(self): 4269 self._check_hashmap() 4270 return IMutableMappingIteratorAsOffset(self, True) 4271 4272 # def __contains__(self, key: Hashable) -> bool: 4273 # item_hash = hash(key) 4274 # item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) 4275 # field_type_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.field_type.value 4276 # item_hash_index = item_info_index * len(MutableMappingHashmapItemOffsets) + MutableMappingHashmapItemOffsets.field_hash.value 4277 # item_bucket_index = item_info_index * len(MutableMappingHashmapItemOffsets) + 
    # NOTE(review): the original source held a large commented-out legacy
    # membership-check implementation here (orig. lines 4278-4299), superseded
    # by the live __getitem__/__contains__ machinery below; omitted.

    def __getitem__(self, key: Hashable):
        """Return the value stored for ``key``.

        The key's hash is masked down to ``hash_bits`` to pick a hashmap slot.
        A slot is either empty (tnone), holds the key/value inline (tobj), or
        points to an overflow bucket IList (tbucket).

        Raises:
            KeyError: key absent (or cached bucket is stale — see NOTE below).
            ValueError: corrupted/unknown slot tag.
        """
        self._check_hashmap()
        item_hash = hash(key)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MutableMappingHashmapItemOffsets)
        field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value
        item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value
        field_type = self.hashmap[field_type_index]
        if MutableMappingHashmapFieldTypes.tnone.value == field_type:
            raise KeyError
        elif MutableMappingHashmapFieldTypes.tobj.value == field_type:
            # Hash compared first (cheap), then full key equality.
            if (item_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
                return self.hashmap[item_value_index]
            else:
                raise KeyError
        elif MutableMappingHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable — the bare ``raise`` above always
                # re-raises. Looks like a disabled cache-refresh path; the
                # buckets cache is presumably repopulated by _check_hashmap /
                # _refresh_hashmap instead. TODO confirm intent.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # Scan the overflow bucket in fixed-size sub-item records.
            for sub_item_info_index in range(0, len(bucket), len(MutableMappingBucketOffsets)):
                bucket_field_type = bucket[sub_item_info_index + MutableMappingBucketOffsets.field_type.value]
                if MutableMappingBucketFieldTypes.tnone.value == bucket_field_type:
                    continue

                sub_item_hash_index = sub_item_info_index + MutableMappingBucketOffsets.field_hash.value
                sub_item_key_obj_index = sub_item_info_index + MutableMappingBucketOffsets.key_obj.value
                sub_item_value_obj_index = sub_item_info_index + MutableMappingBucketOffsets.value_obj.value
                if (item_hash == bucket[sub_item_hash_index]) and (key == bucket[sub_item_key_obj_index]):
                    return bucket[sub_item_value_obj_index]

            raise KeyError
        else:
            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def __setitem__(self, key, value):
        """Store ``value`` under ``key``.

        Empty slot: store inline. Occupied slot with same key: overwrite value.
        Occupied slot with a different key: migrate the inline entry into a new
        overflow bucket, then append the new pair. Existing bucket: overwrite,
        reuse a freed record, or append.
        """
        self._check_hashmap()
        key_hash = hash(key)
        item_info_index: int = mask_least_significant_bits(key_hash, self.hash_bits) * len(MutableMappingHashmapItemOffsets)
        field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value
        item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value
        field_type = self.hashmap[field_type_index]
        if MutableMappingHashmapFieldTypes.tnone.value == field_type:
            self.hashmap[field_type_index] = MutableMappingHashmapFieldTypes.tobj.value
            self.hashmap[item_hash_index] = key_hash
            self.hashmap[item_bucket_index] = key
            self.hashmap[item_value_index] = value
            self._increase_size()
            return
        elif MutableMappingHashmapFieldTypes.tobj.value == field_type:
            if (key_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
                self.hashmap[item_value_index] = value
                return

            # Collision: convert the inline entry into an overflow bucket.
            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(len(MutableMappingBucketOffsets))
            bucket.extend_with(len(MutableMappingBucketOffsets), 0)
            self.buckets[item_info_index] = bucket
            bucket[MutableMappingBucketOffsets.field_type.value] = MutableMappingBucketFieldTypes.tobj.value
            # move_item_to_list transfers ownership of the stored objects into
            # the bucket without re-serialization.
            self.hashmap.move_item_to_list(item_hash_index, bucket, MutableMappingBucketOffsets.field_hash.value)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, MutableMappingBucketOffsets.key_obj.value)
            self.hashmap.move_item_to_list(item_value_index, bucket, MutableMappingBucketOffsets.value_obj.value)
            self.hashmap[field_type_index] = MutableMappingHashmapFieldTypes.tbucket.value
            self.hashmap[item_bucket_index] = bucket_offset
            bucket.append(MutableMappingBucketFieldTypes.tobj.value)
            bucket.append(key_hash)
            bucket.append(key)
            bucket.append(value)
            self._increase_size()
            return
        elif MutableMappingHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable (see __getitem__) — TODO confirm.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            bucket_len: int = len(bucket)
            # Pass 1: overwrite an existing record for this key.
            for bucket_item_index in range(0, bucket_len, len(MutableMappingBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableMappingBucketOffsets.field_type.value]
                if MutableMappingBucketFieldTypes.tobj.value == bucket_field_type:
                    if (key_hash == bucket[bucket_item_index + MutableMappingBucketOffsets.field_hash.value]) and (key == bucket[bucket_item_index + MutableMappingBucketOffsets.key_obj.value]):
                        bucket[bucket_item_index + MutableMappingBucketOffsets.value_obj.value] = value
                        return

            # Pass 2: reuse a freed (tnone) record; for-else appends when none.
            for bucket_item_index in range(0, bucket_len, len(MutableMappingBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableMappingBucketOffsets.field_type.value]
                if MutableMappingBucketFieldTypes.tnone.value == bucket_field_type:
                    bucket[bucket_item_index + MutableMappingBucketOffsets.field_type.value] = MutableMappingBucketFieldTypes.tobj.value
                    bucket[bucket_item_index + MutableMappingBucketOffsets.field_hash.value] = key_hash
                    bucket[bucket_item_index + MutableMappingBucketOffsets.key_obj.value] = key
                    bucket[bucket_item_index + MutableMappingBucketOffsets.value_obj.value] = value
                    self._increase_size()
                    return
            else:
                bucket.append(MutableMappingBucketFieldTypes.tobj.value)
                bucket.append(key_hash)
                bucket.append(key)
                bucket.append(value)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def setitem_as_offset(self, key_hash, key_type, key_offset, value_type, value_offset):
        """Like ``__setitem__`` but for already-serialized key/value given as
        (type, offset) pairs, avoiding re-serialization; mirrors __setitem__'s
        three-branch structure using the *_as_offset IList accessors.
        """
        self._check_hashmap()
        key = (key_type, key_offset)
        value = (value_type, value_offset)
        item_info_index: int = mask_least_significant_bits(key_hash, self.hash_bits) * len(MutableMappingHashmapItemOffsets)
        field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value
        item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value
        field_type = self.hashmap[field_type_index]
        if MutableMappingHashmapFieldTypes.tnone.value == field_type:
            self.hashmap[field_type_index] = MutableMappingHashmapFieldTypes.tobj.value
            self.hashmap[item_hash_index] = key_hash
            self.hashmap.setitem_as_offset(item_bucket_index, key)
            self.hashmap.setitem_as_offset(item_value_index, value)
            self._increase_size()
            return
        elif MutableMappingHashmapFieldTypes.tobj.value == field_type:
            if (key_hash == self.hashmap[item_hash_index]) and (key == self.hashmap.getitem_as_offset(item_bucket_index)):
                self.hashmap.setitem_as_offset(item_value_index, value)
                return

            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(len(MutableMappingBucketOffsets))
            bucket.extend_with(len(MutableMappingBucketOffsets), 0)
            self.buckets[item_info_index] = bucket
            bucket[MutableMappingBucketOffsets.field_type.value] = MutableMappingBucketFieldTypes.tobj.value
            self.hashmap.move_item_to_list(item_hash_index, bucket, MutableMappingBucketOffsets.field_hash.value)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, MutableMappingBucketOffsets.key_obj.value)
            self.hashmap.move_item_to_list(item_value_index, bucket, MutableMappingBucketOffsets.value_obj.value)
            self.hashmap[field_type_index] = MutableMappingHashmapFieldTypes.tbucket.value
            self.hashmap[item_bucket_index] = bucket_offset
            bucket.append(MutableMappingBucketFieldTypes.tobj.value)
            bucket.append(key_hash)
            bucket.append_as_offset(key)
            bucket.append_as_offset(value)
            self._increase_size()
            return
        elif MutableMappingHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable (see __getitem__) — TODO confirm.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            bucket_len: int = len(bucket)
            for bucket_item_index in range(0, bucket_len, len(MutableMappingBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableMappingBucketOffsets.field_type.value]
                if MutableMappingBucketFieldTypes.tobj.value == bucket_field_type:
                    if (key_hash == bucket[bucket_item_index + MutableMappingBucketOffsets.field_hash.value]) and (key == bucket.getitem_as_offset(bucket_item_index + MutableMappingBucketOffsets.key_obj.value)):
                        bucket.setitem_as_offset(bucket_item_index + MutableMappingBucketOffsets.value_obj.value, value)
                        return

            for bucket_item_index in range(0, bucket_len, len(MutableMappingBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableMappingBucketOffsets.field_type.value]
                if MutableMappingBucketFieldTypes.tnone.value == bucket_field_type:
                    bucket[bucket_item_index + MutableMappingBucketOffsets.field_type.value] = MutableMappingBucketFieldTypes.tobj.value
                    bucket[bucket_item_index + MutableMappingBucketOffsets.field_hash.value] = key_hash
                    bucket.setitem_as_offset(bucket_item_index + MutableMappingBucketOffsets.key_obj.value, key)
                    bucket.setitem_as_offset(bucket_item_index + MutableMappingBucketOffsets.value_obj.value, value)
                    self._increase_size()
                    return
            else:
                bucket.append(MutableMappingBucketFieldTypes.tobj.value)
                bucket.append(key_hash)
                bucket.append_as_offset(key)
                bucket.append_as_offset(value)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def __delitem__(self, key):
        """Remove ``key``; slot (or bucket record) is marked tnone and its
        stored objects are cleared.

        Raises:
            KeyError: key absent.
            ValueError: corrupted/unknown slot tag.
        """
        self._check_hashmap()
        item_hash = hash(key)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * len(MutableMappingHashmapItemOffsets)
        field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value
        item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value
        field_type = self.hashmap[field_type_index]
        if MutableMappingHashmapFieldTypes.tnone.value == field_type:
            raise KeyError
        elif MutableMappingHashmapFieldTypes.tobj.value == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
                self.hashmap[field_type_index] = MutableMappingHashmapFieldTypes.tnone.value
                self.hashmap[item_hash_index] = None
                self.hashmap[item_bucket_index] = None
                self.hashmap[item_value_index] = None
                self._decrease_size()
                return
            else:
                raise KeyError
        elif MutableMappingHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable (see __getitem__) — TODO confirm.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            for sub_item_info_index in range(0, len(bucket), len(MutableMappingBucketOffsets)):
                bucket_field_type = bucket[sub_item_info_index + MutableMappingBucketOffsets.field_type.value]
                if MutableMappingBucketFieldTypes.tnone.value == bucket_field_type:
                    continue

                sub_item_hash_index = sub_item_info_index + MutableMappingBucketOffsets.field_hash.value
                sub_item_key_obj_index = sub_item_info_index + MutableMappingBucketOffsets.key_obj.value
                sub_item_value_obj_index = sub_item_info_index + MutableMappingBucketOffsets.value_obj.value
                if (item_hash == bucket[sub_item_hash_index]) and (key == bucket[sub_item_key_obj_index]):
                    bucket[sub_item_info_index + MutableMappingBucketOffsets.field_type.value] = MutableMappingBucketFieldTypes.tnone.value
                    bucket[sub_item_hash_index] = None
                    bucket[sub_item_key_obj_index] = None
                    bucket[sub_item_value_obj_index] = None
                    self._decrease_size()
                    return

            raise KeyError
        else:
            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    @property
    def hash_bits(self) -> int:
        # Number of low-order hash bits used to index the hashmap.
        return self._hash_bits

    @hash_bits.setter
    def hash_bits(self, value: int) -> None:
        # Capacity is always a power of two of the bit count.
        self._hash_bits = value
        self._capacity = 2 ** value

    @property
    def capacity(self) -> int:
        return self._capacity

    @capacity.setter
    def capacity(self, value: int) -> None:
        # Grow-only: requests at or below the current capacity are ignored.
        if value <= self._capacity:
            return

        if value <= 2:
            self.hash_bits = 1
        else:
            # Round up to the next power of two via the bit count.
            self.hash_bits = int(ceil(log2(value)))

    def __str__(self) -> str:
        # Renders via a transient regular dict built through iteration.
        self._check_hashmap()
        return dict(self).__str__()

    def __repr__(self) -> str:
        self._check_hashmap()
        return dict(self).__repr__()

    def _free_mem(self):
        """Release all shared memory owned by this mapping: every overflow
        bucket, the hashmap IList, then the mapping record itself. Idempotent:
        offsets are None-ed after release."""
        if self._offset is not None:
            if self.hashmap_offset is not None:
                self._check_hashmap()

            for _, bucket in self.buckets.items():
                self._shared_memory.destroy_obj(bucket._offset)
            self.buckets.clear()
            if self.hashmap_offset is not None:
                self._shared_memory.destroy_obj(self.hashmap_offset)
                self.hashmap_offset = None

            self._shared_memory.free(self._offset)
            self._offset = None
    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMutableMapping = None) -> None:
        """Create a shared-memory mapping, or adopt an existing one.

        Args:
            shared_memory: owning SharedMemory arena.
            offset: when given, adopt the mapping already stored at this
                offset (``obj`` ignored); when None, allocate a new record.
            obj: optional mapping whose items seed the new record.
        """
        self._shared_memory = shared_memory
        self._base_address = shared_memory.base_address
        self._obj_size = None
        self._offset: Offset = None
        self._offset__data: Offset = None
        self._offset__size_offset: Offset = None
        self._offset__capacity_offset: Offset = None
        self._offset__hashmap_offset: Offset = None
        self._offset__refresh_counter_offset: Offset = None
        # Grow when size/capacity exceeds _load_factor; _load_factor_2 is its
        # square (0.75**2), used to derive the shrink threshold.
        self._load_factor = 0.75
        self._load_factor_2 = 0.5625
        self._hash_bits: int = None
        self._capacity: int = None
        self._min_capacity: int = None
        self._size: int = None
        self.hashmap: IList = None
        self._refresh_counter: int = 0
        self.hashmap_offset: Offset = None
        # Local (per-process) cache of overflow-bucket ILists, keyed by
        # item_info_index.
        self.buckets: Dict[int, IList] = dict()

        # Suppress rehashing during construction/seeding.
        self.ignore_rehash: bool = True

        if offset is None:
            if obj is None:
                # obj = frozenset(set())
                data_len = 16  # default initial sizing when no seed object
            else:
                data_len = len(obj)

            self._size: int = 0
            self.hash_bits = 1
            self.capacity = int(ceil(data_len / self._load_factor))
            self._min_capacity = int(ceil(self._capacity * self._load_factor_2))

            offset, self._obj_size = shared_memory.malloc(ObjectType.tmutablemapping, bs * len(MutableMappingOffsets))
            # NOTE(review): collected but never appended/used below — leftover?
            created_items_offsets: List[Offset] = list()
            try:
                self._offset = offset
                offset__data = offset + bs * len(BaseObjOffsets)
                self._offset__data = offset__data
                self._offset__size_offset = offset__data + bs * MutableMappingOffsets.size.value
                self._offset__capacity_offset = offset__data + bs * MutableMappingOffsets.capacity.value
                self._offset__hashmap_offset = offset__data + bs * MutableMappingOffsets.hashmap_offset.value
                self._offset__refresh_counter_offset = offset__data + bs * MutableMappingOffsets.refresh_counter.value

                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)
                write_uint64(shared_memory.base_address, self._offset__refresh_counter_offset, self._refresh_counter)

                # The hashmap itself is a flat shared IList of fixed-width
                # item records.
                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
                self.hashmap = cast(IList, self.hashmap)
                self.hashmap_offset = hashmap_offset
                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
                hashmap_capacity = self.capacity * len(MutableMappingHashmapItemOffsets)
                self.hashmap.set_capacity(hashmap_capacity)
                self.hashmap.extend_with(hashmap_capacity, 0)
                hash_bits: int = self.hash_bits  # NOTE(review): unused local
                if obj is None:
                    pass
                elif isinstance(obj, IMutableMapping):
                    self._move_from(obj)
                else:
                    for key, value in obj.items():
                        self.__setitem__(key, value)

                # Seeding may have bumped the counter; re-read the shared copy.
                self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset)

                self.ignore_rehash = False
            except:  # NOTE(review): bare except — intentional cleanup+reraise
                self._free_mem()
                raise
        else:
            # Adoption path: all shared-side state is read by _refresh_hashmap.
            self._refresh_hashmap(offset)
            self.ignore_rehash = False
        # NOTE(review): the original source carried a large commented-out
        # legacy adoption implementation here (orig. lines 4009-4064),
        # superseded by _refresh_hashmap; omitted.
    def setitem_as_offset(self, key_hash, key_type, key_offset, value_type, value_offset):
        """Store an already-serialized key/value pair given as (type, offset)
        tuples, avoiding re-serialization; mirrors __setitem__'s three-branch
        structure (empty slot / inline entry / overflow bucket) using the
        *_as_offset IList accessors.
        """
        self._check_hashmap()
        key = (key_type, key_offset)
        value = (value_type, value_offset)
        item_info_index: int = mask_least_significant_bits(key_hash, self.hash_bits) * len(MutableMappingHashmapItemOffsets)
        field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value
        item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value
        item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value
        item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value
        field_type = self.hashmap[field_type_index]
        if MutableMappingHashmapFieldTypes.tnone.value == field_type:
            # Empty slot: store the pair inline.
            self.hashmap[field_type_index] = MutableMappingHashmapFieldTypes.tobj.value
            self.hashmap[item_hash_index] = key_hash
            self.hashmap.setitem_as_offset(item_bucket_index, key)
            self.hashmap.setitem_as_offset(item_value_index, value)
            self._increase_size()
            return
        elif MutableMappingHashmapFieldTypes.tobj.value == field_type:
            if (key_hash == self.hashmap[item_hash_index]) and (key == self.hashmap.getitem_as_offset(item_bucket_index)):
                self.hashmap.setitem_as_offset(item_value_index, value)
                return

            # Collision: migrate the inline entry into a new overflow bucket.
            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(len(MutableMappingBucketOffsets))
            bucket.extend_with(len(MutableMappingBucketOffsets), 0)
            self.buckets[item_info_index] = bucket
            bucket[MutableMappingBucketOffsets.field_type.value] = MutableMappingBucketFieldTypes.tobj.value
            self.hashmap.move_item_to_list(item_hash_index, bucket, MutableMappingBucketOffsets.field_hash.value)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, MutableMappingBucketOffsets.key_obj.value)
            self.hashmap.move_item_to_list(item_value_index, bucket, MutableMappingBucketOffsets.value_obj.value)
            self.hashmap[field_type_index] = MutableMappingHashmapFieldTypes.tbucket.value
            self.hashmap[item_bucket_index] = bucket_offset
            bucket.append(MutableMappingBucketFieldTypes.tobj.value)
            bucket.append(key_hash)
            bucket.append_as_offset(key)
            bucket.append_as_offset(value)
            self._increase_size()
            return
        elif MutableMappingHashmapFieldTypes.tbucket.value == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable — bare ``raise`` above always
                # re-raises; looks like a disabled cache-refresh path. Confirm.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            bucket_len: int = len(bucket)
            # Pass 1: overwrite an existing record for this key.
            for bucket_item_index in range(0, bucket_len, len(MutableMappingBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableMappingBucketOffsets.field_type.value]
                if MutableMappingBucketFieldTypes.tobj.value == bucket_field_type:
                    if (key_hash == bucket[bucket_item_index + MutableMappingBucketOffsets.field_hash.value]) and (key == bucket.getitem_as_offset(bucket_item_index + MutableMappingBucketOffsets.key_obj.value)):
                        bucket.setitem_as_offset(bucket_item_index + MutableMappingBucketOffsets.value_obj.value, value)
                        return

            # Pass 2: reuse a freed (tnone) record; for-else appends when none.
            for bucket_item_index in range(0, bucket_len, len(MutableMappingBucketOffsets)):
                bucket_field_type = bucket[bucket_item_index + MutableMappingBucketOffsets.field_type.value]
                if MutableMappingBucketFieldTypes.tnone.value == bucket_field_type:
                    bucket[bucket_item_index + MutableMappingBucketOffsets.field_type.value] = MutableMappingBucketFieldTypes.tobj.value
                    bucket[bucket_item_index + MutableMappingBucketOffsets.field_hash.value] = key_hash
                    bucket.setitem_as_offset(bucket_item_index + MutableMappingBucketOffsets.key_obj.value, key)
                    bucket.setitem_as_offset(bucket_item_index + MutableMappingBucketOffsets.value_obj.value, value)
                    self._increase_size()
                    return
            else:
                bucket.append(MutableMappingBucketFieldTypes.tobj.value)
                bucket.append(key_hash)
                bucket.append_as_offset(key)
                bucket.append_as_offset(value)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
Inherited Members
- collections.abc.MutableMapping
- pop
- popitem
- clear
- update
- setdefault
- collections.abc.Mapping
- get
- keys
- items
- values
class IMutableMappingIterator:
    """Key iterator over an IMutableMapping.

    Walks hashmap slots by ``_index``; inside an overflow bucket, ``_sub_index``
    remembers the next bucket record to visit across __next__ calls.
    """

    def __init__(self, imapping: IMutableMapping) -> None:
        self._imapping = imapping
        self._index = 0       # current hashmap slot
        self._sub_index = 0   # next record to visit within the current bucket

    def __next__(self):
        # _check_hashmap() returning truthy means the shared hashmap was
        # rebuilt since iteration started.
        if self._imapping._check_hashmap():
            raise RuntimeError("Dictionary's hashmap changed during iteration")

        while self._index < self._imapping.capacity:
            item_info_index: int = self._index * len(MutableMappingHashmapItemOffsets)
            field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value
            item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value
            field_type = self._imapping.hashmap[field_type_index]
            if MutableMappingHashmapFieldTypes.tnone.value == field_type:
                self._index += 1
                continue
            elif MutableMappingHashmapFieldTypes.tobj.value == field_type:
                # Inline entry: the key_or_bucket field holds the key itself.
                result = self._imapping.hashmap[item_bucket_index]
                self._index += 1
                return result
            elif MutableMappingHashmapFieldTypes.tbucket.value == field_type:
                bucket_offset = self._imapping.hashmap[item_bucket_index]
                try:
                    bucket = self._imapping.buckets[item_info_index]
                    if bucket._offset != bucket_offset:
                        raise KeyError
                except KeyError:
                    raise
                    # NOTE(review): unreachable — kept to mirror the disabled
                    # cache-refresh path used throughout IMutableMapping.
                    self._imapping.buckets[item_info_index] = bucket = IList(self._imapping._shared_memory, bucket_offset)

                bucket_len = len(bucket)
                sub_item_info_index = self._sub_index
                while (sub_item_info_index * len(MutableMappingBucketOffsets)) < bucket_len:
                    sub_item_field_type_index = sub_item_info_index * len(MutableMappingBucketOffsets) + MutableMappingBucketOffsets.field_type.value
                    if bucket[sub_item_field_type_index] == MutableMappingBucketFieldTypes.tnone.value:
                        sub_item_info_index += 1
                        continue

                    sub_item_key_obj_index = sub_item_info_index * len(MutableMappingBucketOffsets) + MutableMappingBucketOffsets.key_obj.value
                    result = bucket[sub_item_key_obj_index]
                    # BUGFIX: was ``self._sub_index += 1``. When tnone records
                    # were skipped, sub_item_info_index > self._sub_index, so
                    # the old code did not advance past the record just
                    # returned and the next call re-returned the same key
                    # forever. Resume after the returned record instead.
                    self._sub_index = sub_item_info_index + 1
                    return result
                else:
                    # Bucket exhausted: move to the next hashmap slot.
                    self._sub_index = 0
                    self._index += 1
                    continue
            else:
                raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        else:
            raise StopIteration

    def __iter__(self):
        return self
class IMutableMappingIteratorAsOffset:
    """Iterator yielding raw ``(key_hash, key_type, key_offset, value_type,
    value_offset)`` tuples from an IMutableMapping, optionally popping each
    yielded entry (``pop=True``) by marking it tnone.
    """

    def __init__(self, imapping: IMutableMapping, pop: bool = False) -> None:
        self._imapping = imapping
        self._pop: bool = pop
        self._index = 0       # current hashmap slot
        self._sub_index = 0   # next record to visit within the current bucket

    def __next__(self):
        if self._imapping._check_hashmap():
            raise RuntimeError("Dictionary's hashmap changed during iteration")

        while self._index < self._imapping.capacity:
            item_info_index: int = self._index * len(MutableMappingHashmapItemOffsets)
            field_type_index = item_info_index + MutableMappingHashmapItemOffsets.field_type.value
            item_hash_index = item_info_index + MutableMappingHashmapItemOffsets.field_hash.value
            item_bucket_index = item_info_index + MutableMappingHashmapItemOffsets.key_or_bucket.value
            item_value_index = item_info_index + MutableMappingHashmapItemOffsets.value_or_none.value
            field_type = self._imapping.hashmap[field_type_index]
            if MutableMappingHashmapFieldTypes.tnone.value == field_type:
                self._index += 1
                continue
            elif MutableMappingHashmapFieldTypes.tobj.value == field_type:
                key_hash = self._imapping.hashmap[item_hash_index]
                key_type, key_offset = self._imapping.hashmap.getitem_as_offset(item_bucket_index)
                value_type, value_offset = self._imapping.hashmap.getitem_as_offset(item_value_index)
                if self._pop:
                    self._imapping.hashmap[field_type_index] = MutableMappingHashmapFieldTypes.tnone.value
                    self._imapping.hashmap[item_hash_index] = None
                    self._imapping.hashmap.setitem_as_offset(item_bucket_index, (InternalListFieldTypes.tnone.value, 0), False)
                    self._imapping.hashmap.setitem_as_offset(item_value_index, (InternalListFieldTypes.tnone.value, 0), False)

                self._index += 1
                return key_hash, key_type, key_offset, value_type, value_offset
            elif MutableMappingHashmapFieldTypes.tbucket.value == field_type:
                bucket_offset = self._imapping.hashmap[item_bucket_index]
                try:
                    bucket = self._imapping.buckets[item_info_index]
                    if bucket._offset != bucket_offset:
                        raise KeyError
                except KeyError:
                    raise
                    # NOTE(review): unreachable — kept to mirror the disabled
                    # cache-refresh path used throughout IMutableMapping.
                    self._imapping.buckets[item_info_index] = bucket = IList(self._imapping._shared_memory, bucket_offset)

                bucket_len = len(bucket)
                sub_item_info_index = self._sub_index
                while (sub_item_info_index * len(MutableMappingBucketOffsets)) < bucket_len:
                    sub_item_field_type_index = sub_item_info_index * len(MutableMappingBucketOffsets) + MutableMappingBucketOffsets.field_type.value
                    if bucket[sub_item_field_type_index] == MutableMappingBucketFieldTypes.tnone.value:
                        sub_item_info_index += 1
                        continue

                    sub_item_hash_index = sub_item_info_index * len(MutableMappingBucketOffsets) + MutableMappingBucketOffsets.field_hash.value
                    sub_item_key_obj_index = sub_item_info_index * len(MutableMappingBucketOffsets) + MutableMappingBucketOffsets.key_obj.value
                    sub_item_value_obj_index = sub_item_info_index * len(MutableMappingBucketOffsets) + MutableMappingBucketOffsets.value_obj.value

                    key_hash = bucket[sub_item_hash_index]
                    key_type, key_offset = bucket.getitem_as_offset(sub_item_key_obj_index)
                    value_type, value_offset = bucket.getitem_as_offset(sub_item_value_obj_index)
                    if self._pop:
                        # CONSISTENCY FIX: bucket records are tagged with
                        # MutableMappingBucketFieldTypes (the skip check above
                        # compares against it); the original wrote
                        # MutableMappingHashmapFieldTypes.tnone here.
                        bucket[sub_item_field_type_index] = MutableMappingBucketFieldTypes.tnone.value
                        bucket[sub_item_hash_index] = None
                        bucket.setitem_as_offset(sub_item_key_obj_index, (InternalListFieldTypes.tnone.value, 0), False)
                        bucket.setitem_as_offset(sub_item_value_obj_index, (InternalListFieldTypes.tnone.value, 0), False)

                    # BUGFIX: was ``self._sub_index += 1``. When tnone records
                    # were skipped, sub_item_info_index > self._sub_index, so
                    # the old code re-returned the same record on the next
                    # call. Resume after the returned record instead.
                    self._sub_index = sub_item_info_index + 1
                    return key_hash, key_type, key_offset, value_type, value_offset
                else:
                    self._sub_index = 0
                    self._index += 1
                    continue
            else:
                raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        else:
            raise StopIteration

    def __iter__(self):
        return self
class TMutableMapping:
    """Codec for mapping-like objects kept in shared memory via an IMutableMapping proxy."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: AbsMutableMapping) -> Tuple[IMutableMapping, Offset, Size]:
        """Copy ``obj`` into shared memory; return (proxy, offset, allocated size)."""
        mapped: IMutableMapping = IMutableMapping(shared_memory, obj=obj)
        return mapped, mapped._offset, mapped._obj_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> IMutableMapping:
        """Attach a proxy to a mapping already stored at ``offset``.

        Raises WrongObjectTypeError when the record's type tag is not tmutablemapping.
        """
        if read_uint64(shared_memory.base_address, offset) != ObjectType.tmutablemapping:
            raise WrongObjectTypeError

        return IMutableMapping(shared_memory, offset)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the shared-memory storage backing the mapping at ``offset``.

        Raises WrongObjectTypeError when the record's type tag is not tmutablemapping.
        """
        if read_uint64(shared_memory.base_address, offset) != ObjectType.tmutablemapping:
            raise WrongObjectTypeError

        IMutableMapping(shared_memory, offset)._free_mem()
# Slot layout of a TGeneralObject record (one uint64 per member, after the
# BaseObjOffsets header): the pickled snapshot, the shared attribute dict,
# and the pickled set of settable-data-descriptor field names.
GeneralObjectOffsets = IntEnum(
    'GeneralObjectOffsets',
    ('pickled_obj', 'obj_dict', 'setable_data_descriptor_field_names'),
    start=0,
)
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
def tgeneralobject_custom_getattribute(self, name):
    """``__getattribute__`` replacement installed by ``tgeneralobject_wrap_obj``.

    Bookkeeping fields and dunder names bypass the shared storage; every other
    name is first looked up in the shared attribute mapping and only falls back
    to the regular instance lookup when absent there.
    """
    passthrough = name.startswith('__') or name in (
        '_tgeneralobject_imutablemapping_attributes',
        '_tgeneralobject_setable_data_descriptor_field_names',
    )
    if not passthrough:
        try:
            return self._tgeneralobject_imutablemapping_attributes[name]
        except KeyError:
            pass

    return object.__getattribute__(self, name)
def tgeneralobject_custom_setattr(self, name, value):
    """``__setattr__`` replacement installed by ``tgeneralobject_wrap_obj``.

    Bookkeeping fields, dunder names, and callable-like values (functions,
    methods, frames, code objects, method descriptors) are stored on the
    instance itself; plain data values go into the shared attribute mapping.
    """
    if name.startswith('__') or name in (
        '_tgeneralobject_imutablemapping_attributes',
        '_tgeneralobject_setable_data_descriptor_field_names',
    ):
        object.__setattr__(self, name, value)
        return

    callable_like = (
        isfunction(value)
        or ismethod(value)
        or isinstance(value, (FrameType, CodeType))
        or ismethoddescriptor(value)
    )
    if callable_like:
        object.__setattr__(self, name, value)
        return

    self._tgeneralobject_imutablemapping_attributes[name] = value
def tgeneralobject_custom_delattr(self, name):
    """``__delattr__`` replacement installed by ``tgeneralobject_wrap_obj``.

    Bookkeeping fields and dunder names are deleted directly on the instance.
    Callable-like values found statically on the object are also deleted
    directly.  Anything else is removed from the shared attribute mapping
    first; only when the name is not there does deletion fall back to the
    regular instance attribute.
    """
    if name in {'_tgeneralobject_imutablemapping_attributes', '_tgeneralobject_setable_data_descriptor_field_names'} or name.startswith('__'):
        object.__delattr__(self, name)
    else:
        has_value_static: bool = False
        value_static = None
        try:
            value_static = getattr_static(self, name)
            has_value_static = True
        except AttributeError:
            pass

        deleted: bool = False
        try:
            # BUGFIX: the original condition read `has_value_static and
            # isfunction(...) or ismethod(...) or ...`; `and` binds tighter
            # than `or`, so every predicate after the first ignored the
            # `has_value_static` guard.  It only behaved correctly by accident
            # because all the predicates are False for `value_static is None`.
            if has_value_static and (
                isfunction(value_static)
                or ismethod(value_static)
                or isinstance(value_static, FrameType)
                or isinstance(value_static, CodeType)
                or ismethoddescriptor(value_static)
            ):
                object.__delattr__(self, name)
                return
        except AttributeError:
            pass

        try:
            # data descriptor with a deleter: run it via the normal protocol
            if has_value_static and (not isclass(value_static)) and hasattr(value_static, "__delete__"):
                object.__delattr__(self, name)
                deleted = True
        except AttributeError:
            pass

        try:
            del self._tgeneralobject_imutablemapping_attributes[name]
            return
        except KeyError:
            pass

        if not deleted:
            object.__delattr__(self, name)
def tgeneralobject_wrap_obj(obj, mapped_obj_dict: IMutableMapping, setable_data_descriptor_field_names: Set[str], init_mapped_obj_dict: bool):
    """Rebind ``obj``'s class to a dynamically created subclass whose attribute
    protocol is served by the shared attribute mapping.

    When ``init_mapped_obj_dict`` is True, every plain data attribute of ``obj``
    is copied into ``mapped_obj_dict`` first, and names backed by settable data
    descriptors are recorded in ``setable_data_descriptor_field_names``.
    """
    original_class = obj.__class__
    setattr(obj, '_tgeneralobject_imutablemapping_attributes', mapped_obj_dict)
    setattr(obj, '_tgeneralobject_setable_data_descriptor_field_names', setable_data_descriptor_field_names)

    def _is_data_field(key, value):
        # True only for plain data attributes that must migrate into the shared dict.
        if key.startswith('__') or key in (
            '_tgeneralobject_imutablemapping_attributes',
            '_tgeneralobject_setable_data_descriptor_field_names',
        ):
            return False

        if isfunction(value) or ismethod(value) or isinstance(value, (FrameType, CodeType)) or ismethoddescriptor(value):
            return False

        # non-data descriptors (have __get__ but neither __set__ nor __delete__) stay on the class
        if (not isclass(value)) and hasattr(value, '__get__') and not (hasattr(value, '__set__') or hasattr(value, '__delete__')):
            return False

        return True

    if init_mapped_obj_dict:
        for key in set(dir(obj)) - set(dir(object)):
            value = getattr_static(obj, key)
            if not _is_data_field(key, value):
                continue

            if is_setable_data_descriptor(value):
                setable_data_descriptor_field_names.add(key)

            mapped_obj_dict[key] = getattr(obj, key)

    obj.__class__ = type(
        original_class.__name__ + 'WrappedByTGeneralObject',
        (original_class,),
        {
            '__getattribute__': tgeneralobject_custom_getattribute,
            '__setattr__': tgeneralobject_custom_setattr,
            '__delattr__': tgeneralobject_custom_delattr,
        },
    )
class TGeneralObject:
    """Codec that stores an arbitrary Python object in shared memory as a pickled
    snapshot plus a shared attribute dict (slot layout: GeneralObjectOffsets).
    Attribute access on the returned object is routed through the shared dict by
    ``tgeneralobject_wrap_obj``.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
        """Serialize ``obj`` into shared memory; return (wrapped obj, offset, real size).

        ForceGeneralObjectCopy / ForceGeneralObjectInplace wrappers select whether
        the original instance is rebound in place or a pickled copy is wrapped.
        On any failure, the record and every sub-object created so far are released.
        """
        offset, real_size = shared_memory.malloc(ObjectType.tgeneralobject, bs * len(GeneralObjectOffsets))
        created_items_offsets: List[Offset] = list()
        try:
            make_changes_inplace: bool = True
            if isinstance(obj, ForceGeneralObjectCopy):
                obj = obj.obj
                make_changes_inplace = False
            elif isinstance(obj, ForceGeneralObjectInplace):
                obj = obj.obj
                make_changes_inplace = True

            dumped_obj: bytes = pickle_dumps(obj)
            dumped_mapped_obj_type, dumped_obj_offset, dumped_obj_type_size = shared_memory.put_obj(dumped_obj)
            created_items_offsets.append(dumped_obj_offset)
            mapped_obj_dict, obj_dict_offset, obj_dict_size = shared_memory.put_obj(dict())
            created_items_offsets.append(obj_dict_offset)

            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.pickled_obj, dumped_obj_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.obj_dict, obj_dict_offset)

            setable_data_descriptor_field_names: Set[str] = set()

            mapped_obj = None
            if make_changes_inplace:
                tgeneralobject_wrap_obj(obj, mapped_obj_dict, setable_data_descriptor_field_names, True)
                mapped_obj = obj
            else:
                mapped_obj = pickle_loads(dumped_obj)
                tgeneralobject_wrap_obj(mapped_obj, mapped_obj_dict, setable_data_descriptor_field_names, True)

            dumped_setable_data_descriptor_field_names: bytes = pickle_dumps(setable_data_descriptor_field_names)
            mapped_dumped_setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset, dumped_setable_data_descriptor_field_names_size = shared_memory.put_obj(dumped_setable_data_descriptor_field_names)
            # BUGFIX: track this allocation too; previously a failure in the
            # write below leaked the pickled field-names object.
            created_items_offsets.append(dumped_setable_data_descriptor_field_names_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset)
        except:
            # roll back: release the record and every sub-object created so far, then re-raise
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)

            raise

        return mapped_obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
        """Unpickle the object stored at ``offset`` and wrap it so that its data
        attributes are served from the shared attribute dict.

        Raises WrongObjectTypeError when the record's type tag is not tgeneralobject.
        """
        if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.pickled_obj)
        dumped_obj: bytes = shared_memory.get_obj(dumped_obj_offset)

        obj_dict_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.obj_dict)
        mapped_obj_dict = shared_memory.get_obj(obj_dict_offset)
        obj = pickle_loads(dumped_obj)

        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.setable_data_descriptor_field_names)
        dumped_setable_data_descriptor_field_names = shared_memory.get_obj(dumped_setable_data_descriptor_field_names_offset)
        setable_data_descriptor_field_names = pickle_loads(dumped_setable_data_descriptor_field_names)

        tgeneralobject_wrap_obj(obj, mapped_obj_dict, setable_data_descriptor_field_names, False)
        return obj

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the record at ``offset`` and every sub-object it references.

        Raises WrongObjectTypeError when the record's type tag is not tgeneralobject.
        """
        if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.pickled_obj)
        shared_memory.destroy_obj(dumped_obj_offset)

        obj_dict_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.obj_dict)
        if obj_dict_offset:  # may be zero when the slot was never written
            shared_memory.destroy_obj(obj_dict_offset)

        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.setable_data_descriptor_field_names)
        shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset)
        shared_memory.free(offset)
4946 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 4947 if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset): 4948 raise WrongObjectTypeError 4949 4950 dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.pickled_obj) 4951 shared_memory.destroy_obj(dumped_obj_offset) 4952 obj_dict_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.obj_dict) 4953 if obj_dict_offset: 4954 shared_memory.destroy_obj(obj_dict_offset) 4955 4956 dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * GeneralObjectOffsets.setable_data_descriptor_field_names) 4957 shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset) 4958 shared_memory.free(offset)
# Slot layout of a TStaticObject record (one uint64 per member, after the
# BaseObjOffsets header): pickled snapshot, pickled name->slot-index dict,
# shared slots list, and pickled settable-data-descriptor field names.
StaticObjectOffsets = IntEnum(
    'StaticObjectOffsets',
    (
        'pickled_obj',
        'pickled_attributes_dict',
        'attributes_slots',
        'setable_data_descriptor_field_names',
    ),
    start=0,
)
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
def tstaticobject_custom_getattribute(self, name):
    """``__getattribute__`` replacement installed by ``tstaticobject_wrap_obj``.

    Mapped names resolve through the name->index dict into the shared slots
    list; bookkeeping fields, dunder names, and unmapped names use the regular
    instance lookup.
    """
    internal = name.startswith('__') or name in (
        '_tstaticobject_attributes_dict',
        '_tstaticobject_attributes_slots',
        '_tstaticobject_setable_data_descriptor_field_names',
    )
    if not internal:
        try:
            return self._tstaticobject_attributes_slots[self._tstaticobject_attributes_dict[name]]
        except KeyError:
            pass

    return object.__getattribute__(self, name)
def tstaticobject_custom_setattr(self, name, value):
    """``__setattr__`` replacement installed by ``tstaticobject_wrap_obj``.

    Bookkeeping fields, dunder names, and callable-like values are stored on
    the instance; a mapped name updates its shared slot; anything else falls
    back to a regular instance attribute.
    """
    if name.startswith('__') or name in (
        '_tstaticobject_attributes_dict',
        '_tstaticobject_attributes_slots',
        '_tstaticobject_setable_data_descriptor_field_names',
    ):
        object.__setattr__(self, name, value)
        return

    callable_like = (
        isfunction(value)
        or ismethod(value)
        or isinstance(value, (FrameType, CodeType))
        or ismethoddescriptor(value)
    )
    if callable_like:
        object.__setattr__(self, name, value)
        return

    try:
        self._tstaticobject_attributes_slots[self._tstaticobject_attributes_dict[name]] = value
        return
    except KeyError:
        pass

    object.__setattr__(self, name, value)
def tstaticobject_custom_delattr(self, name):
    """``__delattr__`` replacement installed by ``tstaticobject_wrap_obj``.

    Mapped names are read-only (slots cannot shrink) and raise AttributeError;
    everything else is deleted directly on the instance.
    """
    if name.startswith('__') or name in (
        '_tstaticobject_attributes_dict',
        '_tstaticobject_attributes_slots',
        '_tstaticobject_setable_data_descriptor_field_names',
    ):
        object.__delattr__(self, name)
        return

    if name in self._tstaticobject_attributes_dict:
        raise AttributeError(f"'{type(self).__name__}' object attribute '{name}' is read-only")

    object.__delattr__(self, name)
def tstaticobject_wrap_obj(obj, attributes_dict: Dict, attributes_slots: IList, setable_data_descriptor_field_names: Set[str], init_mapped_attributes: bool):
    """Rebind ``obj``'s class to a dynamically created subclass whose data
    attributes live in a fixed shared slots list, indexed via ``attributes_dict``.

    When ``init_mapped_attributes`` is True, the eligible attribute names are
    collected, the slots list is sized to fit them, and the current values are
    copied in; settable data descriptors are recorded by name.
    """
    original_class = obj.__class__
    setattr(obj, '_tstaticobject_attributes_dict', attributes_dict)
    setattr(obj, '_tstaticobject_attributes_slots', attributes_slots)
    setattr(obj, '_tstaticobject_setable_data_descriptor_field_names', setable_data_descriptor_field_names)

    def _is_data_field(key, value):
        # True only for plain data attributes that must migrate into the shared slots.
        if key.startswith('__') or key in (
            '_tstaticobject_attributes_dict',
            '_tstaticobject_attributes_slots',
            '_tstaticobject_setable_data_descriptor_field_names',
        ):
            return False

        if isfunction(value) or ismethod(value) or isinstance(value, (FrameType, CodeType)) or ismethoddescriptor(value):
            return False

        # non-data descriptors (have __get__ but neither __set__ nor __delete__) stay on the class
        if (not isclass(value)) and hasattr(value, '__get__') and not (hasattr(value, '__set__') or hasattr(value, '__delete__')):
            return False

        return True

    if init_mapped_attributes:
        good_fields: List[Hashable] = list()
        for key in set(dir(obj)) - set(dir(object)):
            value = getattr_static(obj, key)
            if not _is_data_field(key, value):
                continue

            if is_setable_data_descriptor(value):
                setable_data_descriptor_field_names.add(key)

            good_fields.append(key)

        good_fields_len = len(good_fields)
        attributes_slots.set_capacity(good_fields_len)
        attributes_slots.extend_with(good_fields_len, 0)
        for index, key in enumerate(good_fields):
            attributes_dict[key] = index
            attributes_slots[index] = getattr(obj, key)

    obj.__class__ = type(
        original_class.__name__ + 'WrappedByTStaticObject',
        (original_class,),
        {
            '__getattribute__': tstaticobject_custom_getattribute,
            '__setattr__': tstaticobject_custom_setattr,
            '__delattr__': tstaticobject_custom_delattr,
        },
    )
class TStaticObject:
    """Codec that stores an object with a fixed attribute set in shared memory:
    a pickled snapshot, a pickled name->slot-index dict, and a shared slots
    list (slot layout: StaticObjectOffsets).  Mapped attributes are read-only
    in shape: values change through the slots; names cannot be added/removed.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
        """Serialize ``obj`` into shared memory; return (wrapped obj, offset, real size).

        ForceStaticObjectCopy / ForceStaticObjectInplace wrappers select whether
        the original instance is rebound in place or a pickled copy is wrapped.
        On any failure, the record and every sub-object created so far are released.
        """
        offset, real_size = shared_memory.malloc(ObjectType.tstaticobject, bs * len(StaticObjectOffsets))
        created_items_offsets: List[Offset] = list()
        try:
            make_changes_inplace: bool = True
            if isinstance(obj, ForceStaticObjectCopy):
                obj = obj.obj
                make_changes_inplace = False
            elif isinstance(obj, ForceStaticObjectInplace):
                obj = obj.obj
                make_changes_inplace = True

            dumped_obj: bytes = pickle_dumps(obj)
            dumped_mapped_obj, dumped_obj_offset, dumped_obj_size = shared_memory.put_obj(dumped_obj)
            created_items_offsets.append(dumped_obj_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_obj, dumped_obj_offset)

            attributes_dict: Dict = dict()

            attributes_slots, attributes_slots_offset, attributes_slots_size = shared_memory.put_obj(list())
            created_items_offsets.append(attributes_slots_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.attributes_slots, attributes_slots_offset)

            setable_data_descriptor_field_names: Set[str] = set()

            mapped_obj = None
            if make_changes_inplace:
                tstaticobject_wrap_obj(obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, True)
                mapped_obj = obj
            else:
                mapped_obj = pickle_loads(dumped_obj)
                tstaticobject_wrap_obj(mapped_obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, True)

            dumped_attributes_dict: bytes = pickle_dumps(attributes_dict)
            dumped_mapped_attributes_dict, dumped_attributes_dict_offset, dumped_attributes_dict_size = shared_memory.put_obj(dumped_attributes_dict)
            # BUGFIX: track this allocation; previously a later failure leaked it.
            created_items_offsets.append(dumped_attributes_dict_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_attributes_dict, dumped_attributes_dict_offset)

            dumped_setable_data_descriptor_field_names: bytes = pickle_dumps(setable_data_descriptor_field_names)
            mapped_dumped_setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset, dumped_setable_data_descriptor_field_names_size = shared_memory.put_obj(dumped_setable_data_descriptor_field_names)
            # BUGFIX: track this allocation; previously a failure in the write below leaked it.
            created_items_offsets.append(dumped_setable_data_descriptor_field_names_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset)
        except:
            # roll back: release the record and every sub-object created so far, then re-raise
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)

            raise

        return mapped_obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
        """Unpickle the object stored at ``offset`` and wrap it over the shared
        slots list already present in shared memory.

        Raises WrongObjectTypeError when the record's type tag is not tstaticobject.
        """
        if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_obj)
        dumped_obj: bytes = shared_memory.get_obj(dumped_obj_offset)
        obj = pickle_loads(dumped_obj)

        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.attributes_slots)
        attributes_slots: IList = shared_memory.get_obj(attributes_slots_offset)

        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_attributes_dict)
        dumped_attributes_dict = shared_memory.get_obj(dumped_attributes_dict_offset)
        attributes_dict = pickle_loads(dumped_attributes_dict)

        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.setable_data_descriptor_field_names)
        dumped_setable_data_descriptor_field_names = shared_memory.get_obj(dumped_setable_data_descriptor_field_names_offset)
        setable_data_descriptor_field_names = pickle_loads(dumped_setable_data_descriptor_field_names)

        tstaticobject_wrap_obj(obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, False)
        return obj

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the record at ``offset`` and every sub-object it references.

        Raises WrongObjectTypeError when the record's type tag is not tstaticobject.
        """
        if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_obj)
        shared_memory.destroy_obj(dumped_obj_offset)
        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.attributes_slots)
        shared_memory.destroy_obj(attributes_slots_offset)
        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_attributes_dict)
        shared_memory.destroy_obj(dumped_attributes_dict_offset)
        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.setable_data_descriptor_field_names)
        shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset)
        shared_memory.free(offset)
5170 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 5171 if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset): 5172 raise WrongObjectTypeError 5173 5174 dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_obj) 5175 shared_memory.destroy_obj(dumped_obj_offset) 5176 attributes_slots_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.attributes_slots) 5177 shared_memory.destroy_obj(attributes_slots_offset) 5178 dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.pickled_attributes_dict) 5179 shared_memory.destroy_obj(dumped_attributes_dict_offset) 5180 dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectOffsets.setable_data_descriptor_field_names) 5181 shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset) 5182 shared_memory.free(offset)
# Slot layout of a TStaticObjectWithSlots record — identical member set to
# StaticObjectOffsets, kept as a separate enum so the two record types can
# evolve independently.
StaticObjectWithSlotsOffsets = IntEnum(
    'StaticObjectWithSlotsOffsets',
    (
        'pickled_obj',
        'pickled_attributes_dict',
        'attributes_slots',
        'setable_data_descriptor_field_names',
    ),
    start=0,
)
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
def tstaticobjectwithslots_custom_getattribute(self, name):
    """``__getattribute__`` replacement installed by ``tstaticobjectwithslots_wrap_obj``.

    Mapped names resolve through the name->index dict into the shared slots
    list; bookkeeping fields, dunder names, and unmapped names use the regular
    instance lookup.
    """
    internal = name.startswith('__') or name in (
        '_tstaticobjectwithslots_attributes_dict',
        '_tstaticobjectwithslots_attributes_slots',
        '_tstaticobjectwithslots_setable_data_descriptor_field_names',
    )
    if not internal:
        try:
            return self._tstaticobjectwithslots_attributes_slots[self._tstaticobjectwithslots_attributes_dict[name]]
        except KeyError:
            pass

    return object.__getattribute__(self, name)
def tstaticobjectwithslots_custom_setattr(self, name, value):
    """``__setattr__`` replacement installed by ``tstaticobjectwithslots_wrap_obj``.

    Bookkeeping fields, dunder names, and callable-like values are stored on
    the instance; a mapped name updates its shared slot; anything else falls
    back to a regular instance attribute.
    """
    if name.startswith('__') or name in (
        '_tstaticobjectwithslots_attributes_dict',
        '_tstaticobjectwithslots_attributes_slots',
        '_tstaticobjectwithslots_setable_data_descriptor_field_names',
    ):
        object.__setattr__(self, name, value)
        return

    callable_like = (
        isfunction(value)
        or ismethod(value)
        or isinstance(value, (FrameType, CodeType))
        or ismethoddescriptor(value)
    )
    if callable_like:
        object.__setattr__(self, name, value)
        return

    try:
        self._tstaticobjectwithslots_attributes_slots[self._tstaticobjectwithslots_attributes_dict[name]] = value
        return
    except KeyError:
        pass

    object.__setattr__(self, name, value)
def tstaticobjectwithslots_custom_delattr(self, name):
    """``__delattr__`` replacement installed by ``tstaticobjectwithslots_wrap_obj``.

    Mapped names are read-only (slots cannot shrink) and raise AttributeError;
    everything else is deleted directly on the instance.
    """
    if name.startswith('__') or name in (
        '_tstaticobjectwithslots_attributes_dict',
        '_tstaticobjectwithslots_attributes_slots',
        '_tstaticobjectwithslots_setable_data_descriptor_field_names',
    ):
        object.__delattr__(self, name)
        return

    if name in self._tstaticobjectwithslots_attributes_dict:
        raise AttributeError(f"'{type(self).__name__}' object attribute '{name}' is read-only")

    object.__delattr__(self, name)
def tstaticobjectwithslots_custom_init(self, original, good_fields, attributes_dict, attributes_slots, setable_data_descriptor_field_names):
    """``__init__`` of the wrapper class built by ``tstaticobjectwithslots_wrap_obj``.

    Binds the shared bookkeeping structures first (so the custom ``__setattr__``
    can route the field copies), then copies each listed field from ``original``.
    """
    bookkeeping = (
        ('_tstaticobjectwithslots_attributes_dict', attributes_dict),
        ('_tstaticobjectwithslots_attributes_slots', attributes_slots),
        ('_tstaticobjectwithslots_setable_data_descriptor_field_names', setable_data_descriptor_field_names),
    )
    for attr_name, bound_value in bookkeeping:
        setattr(self, attr_name, bound_value)

    for attr_name in good_fields:
        setattr(self, attr_name, getattr(original, attr_name))
def tstaticobjectwithslots_custom_eq(self, other):
    """``__eq__`` of the wrapper class: field-wise comparison over the mapped
    attribute names; NotImplemented for incompatible types."""
    parent_class = self.__class__.__bases__[0]
    if not isinstance(other, (type(self), parent_class)):
        return NotImplemented

    for key in self._tstaticobjectwithslots_attributes_dict:
        if not hasattr(other, key) or getattr(self, key) != getattr(other, key):
            return False

    return True
def tstaticobjectwithslots_wrap_obj(obj, attributes_dict: Dict, attributes_slots: IList, setable_data_descriptor_field_names: Set[str], init_mapped_attributes: bool) -> Any:
    """Build and return a NEW wrapper instance (unlike the in-place variants)
    whose data attributes live in the shared slots list.

    When ``init_mapped_attributes`` is True, eligible fields are collected
    (``__slots__`` when the class declares them, otherwise ``dir``), the slots
    list is sized and filled, and settable data descriptors are recorded.
    """
    base = obj.__class__

    def _is_data_field(key, value):
        # True only for plain data attributes that must migrate into the shared slots.
        if key.startswith('__') or key in (
            '_tstaticobjectwithslots_attributes_dict',
            '_tstaticobjectwithslots_attributes_slots',
            '_tstaticobjectwithslots_setable_data_descriptor_field_names',
        ):
            return False

        if isfunction(value) or ismethod(value) or isinstance(value, (FrameType, CodeType)) or ismethoddescriptor(value):
            return False

        # non-data descriptors (have __get__ but neither __set__ nor __delete__) stay on the class
        if (not isclass(value)) and hasattr(value, '__get__') and not (hasattr(value, '__set__') or hasattr(value, '__delete__')):
            return False

        return True

    good_fields: List[Hashable] = list()
    if init_mapped_attributes:
        if hasattr(base, '__slots__'):
            obj_fields = base.__slots__
        else:
            obj_fields = set(dir(obj)) - set(dir(object))

        for key in obj_fields:
            value = getattr_static(obj, key)
            if not _is_data_field(key, value):
                continue

            if is_setable_data_descriptor(value):
                setable_data_descriptor_field_names.add(key)

            good_fields.append(key)

        good_fields_len = len(good_fields)
        attributes_slots.set_capacity(good_fields_len)
        attributes_slots.extend_with(good_fields_len, 0)
        for index, key in enumerate(good_fields):
            attributes_dict[key] = index
            attributes_slots[index] = getattr(obj, key)

    NewClass = type(
        base.__name__ + 'WrappedByTStaticObjectWithSlots',
        (base,),
        {
            '__slots__': ['__dict__'],
            '__init__': tstaticobjectwithslots_custom_init,
            '__eq__': tstaticobjectwithslots_custom_eq,
            '__getattribute__': tstaticobjectwithslots_custom_getattribute,
            '__setattr__': tstaticobjectwithslots_custom_setattr,
            '__delattr__': tstaticobjectwithslots_custom_delattr,
        },
    )

    return NewClass(obj, good_fields, attributes_dict, attributes_slots, setable_data_descriptor_field_names)
class TStaticObjectWithSlots:
    """Codec like TStaticObject, but the wrapper is a NEW ``__slots__``-declaring
    class instance built by ``tstaticobjectwithslots_wrap_obj`` instead of an
    in-place class swap (slot layout: StaticObjectWithSlotsOffsets).
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
        """Serialize ``obj`` into shared memory; return (wrapped copy, offset, real size).

        A pickled copy is always wrapped (never in place).  On any failure, the
        record and every sub-object created so far are released.
        """
        offset, real_size = shared_memory.malloc(ObjectType.tstaticobjectwithslots, bs * len(StaticObjectWithSlotsOffsets))
        created_items_offsets: List[Offset] = list()
        try:
            dumped_obj: bytes = pickle_dumps(obj)
            dumped_mapped_obj, dumped_obj_offset, dumped_obj_size = shared_memory.put_obj(dumped_obj)
            created_items_offsets.append(dumped_obj_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_obj, dumped_obj_offset)

            attributes_dict: Dict = dict()

            attributes_slots, attributes_slots_offset, attributes_slots_size = shared_memory.put_obj(list())
            created_items_offsets.append(attributes_slots_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.attributes_slots, attributes_slots_offset)

            setable_data_descriptor_field_names: Set[str] = set()

            mapped_obj = None
            loaded_obj = pickle_loads(dumped_obj)
            mapped_obj = tstaticobjectwithslots_wrap_obj(loaded_obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, True)

            dumped_attributes_dict: bytes = pickle_dumps(attributes_dict)
            dumped_mapped_attributes_dict, dumped_attributes_dict_offset, dumped_attributes_dict_size = shared_memory.put_obj(dumped_attributes_dict)
            # BUGFIX: track this allocation; previously a later failure leaked it.
            created_items_offsets.append(dumped_attributes_dict_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_attributes_dict, dumped_attributes_dict_offset)

            dumped_setable_data_descriptor_field_names: bytes = pickle_dumps(setable_data_descriptor_field_names)
            mapped_dumped_setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset, dumped_setable_data_descriptor_field_names_size = shared_memory.put_obj(dumped_setable_data_descriptor_field_names)
            # BUGFIX: track this allocation; previously a failure in the write below leaked it.
            created_items_offsets.append(dumped_setable_data_descriptor_field_names_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset)
        except:
            # roll back: release the record and every sub-object created so far, then re-raise
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)

            raise

        return mapped_obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
        """Unpickle the object stored at ``offset`` and wrap it over the shared
        slots list already present in shared memory.

        Raises WrongObjectTypeError when the record's type tag is not
        tstaticobjectwithslots.
        """
        if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_obj)
        dumped_obj: bytes = shared_memory.get_obj(dumped_obj_offset)
        obj = pickle_loads(dumped_obj)

        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.attributes_slots)
        attributes_slots: IList = shared_memory.get_obj(attributes_slots_offset)

        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_attributes_dict)
        dumped_attributes_dict = shared_memory.get_obj(dumped_attributes_dict_offset)
        attributes_dict = pickle_loads(dumped_attributes_dict)

        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names)
        dumped_setable_data_descriptor_field_names = shared_memory.get_obj(dumped_setable_data_descriptor_field_names_offset)
        setable_data_descriptor_field_names = pickle_loads(dumped_setable_data_descriptor_field_names)

        mapped_obj = tstaticobjectwithslots_wrap_obj(obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, False)
        return mapped_obj

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the record at ``offset`` and every sub-object it references.

        Raises WrongObjectTypeError when the record's type tag is not
        tstaticobjectwithslots.
        """
        if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_obj)
        shared_memory.destroy_obj(dumped_obj_offset)
        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.attributes_slots)
        shared_memory.destroy_obj(attributes_slots_offset)
        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_attributes_dict)
        shared_memory.destroy_obj(dumped_attributes_dict_offset)
        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names)
        shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset)
        shared_memory.free(offset)
5392 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 5393 if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset): 5394 raise WrongObjectTypeError 5395 5396 dumped_obj_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_obj) 5397 shared_memory.destroy_obj(dumped_obj_offset) 5398 attributes_slots_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.attributes_slots) 5399 shared_memory.destroy_obj(attributes_slots_offset) 5400 dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.pickled_attributes_dict) 5401 shared_memory.destroy_obj(dumped_attributes_dict_offset) 5402 dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names) 5403 shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset) 5404 shared_memory.free(offset)
5426class TNumpyNdarrayOffsets(IntEnum): 5427 data_buffer_offset = 0 5428 shape_tuple_offset = 1 5429 pickled_datatype_offset = 2
Slot-index enumeration for the header of a shared-memory-mapped `numpy.ndarray`: offsets of the raw data buffer, the shape tuple, and the pickled dtype.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class TNumpyNdarray:
    """Codec that stores a ``numpy.ndarray`` in shared memory.

    Header layout (after the common object header): three uint64 slots holding
    the offsets of 1) the raw data buffer, 2) the shape tuple and 3) the
    pickled dtype — see ``TNumpyNdarrayOffsets``.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', nparray: np.ndarray) -> Tuple[np.ndarray, Offset, Size]:
        """Copy *nparray* into shared memory.

        Returns ``(mapped_array, header_offset, real_size)`` where
        ``mapped_array`` is an ndarray view over the shared buffer.
        On any failure, frees the header and every nested object created so far,
        then re-raises.
        """
        shape = tuple(nparray.shape)
        data_type = nparray.dtype
        pickled_data_type = pickle_dumps(data_type)
        data_buffer: bytes = nparray.tobytes()
        offset, real_size = shared_memory.malloc(ObjectType.tnumpyndarray, bs * len(TNumpyNdarrayOffsets))
        created_items_offsets: List[Offset] = list()
        try:
            data_buffer_mapped_obj, data_buffer_offset, data_buffer_size = shared_memory.put_obj(data_buffer)
            created_items_offsets.append(data_buffer_offset)
            shape_mapped_obj, shape_offset, shape_size = shared_memory.put_obj(shape)
            created_items_offsets.append(shape_offset)
            pickled_data_type_mapped_obj, pickled_data_type_offset, pickled_data_type_size = shared_memory.put_obj(pickled_data_type)
            # BUGFIX: register the pickled-dtype allocation for rollback too; previously
            # it leaked if a subsequent write_uint64 or the view construction raised.
            created_items_offsets.append(pickled_data_type_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.data_buffer_offset, data_buffer_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.shape_tuple_offset, shape_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.pickled_datatype_offset, pickled_data_type_offset)
            mapped_nparray: np.ndarray = make_numpy_array_from_obj_offset(shared_memory, data_buffer_offset, shape, data_type)
        except:
            # Roll back: header first, then every nested object created so far.
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)

            raise

        return mapped_nparray, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> np.ndarray:
        """Rebuild an ndarray view over an array already stored at *offset*.

        Raises WrongObjectTypeError if the header's type tag does not match.
        (Return annotation fixed: was ``dict``.)
        """
        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        data_buffer_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.data_buffer_offset)
        shape_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.shape_tuple_offset)
        pickled_data_type_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.pickled_datatype_offset)
        shape = shared_memory.get_obj(shape_offset)
        pickled_data_type = shared_memory.get_obj(pickled_data_type_offset)
        data_type = pickle_loads(pickled_data_type)
        mapped_nparray: np.ndarray = make_numpy_array_from_obj_offset(shared_memory, data_buffer_offset, shape, data_type)
        return mapped_nparray

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Destroy the three nested objects (data buffer, shape, pickled dtype), then free the header."""
        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        data_buffer_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.data_buffer_offset)
        shape_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.shape_tuple_offset)
        pickled_data_type_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.pickled_datatype_offset)
        shared_memory.destroy_obj(data_buffer_offset)
        shared_memory.destroy_obj(shape_offset)
        shared_memory.destroy_obj(pickled_data_type_offset)
        shared_memory.free(offset)

    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
        """Return a memoryview over the raw data buffer of a stored ndarray.

        NOTE(review): reads the type tag at ``offset + bs * BaseObjOffsets.obj_type``
        while init/destroy read it at bare ``offset`` — equivalent only if
        ``obj_type == 0``; kept as-is, confirm against BaseObjOffsets.
        """
        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        data_buffer_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.data_buffer_offset)
        return shared_memory.get_obj_buffer(data_buffer_offset)

    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
        """Return ``(data_offset, data_size)`` of the raw data buffer of a stored ndarray."""
        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        data_buffer_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.data_buffer_offset)
        return shared_memory.get_obj_buffer_2(data_buffer_offset)
5472 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 5473 if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset): 5474 raise WrongObjectTypeError 5475 5476 data_buffer_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.data_buffer_offset) 5477 shape_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.shape_tuple_offset) 5478 pickled_data_type_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.pickled_datatype_offset) 5479 shared_memory.destroy_obj(data_buffer_offset) 5480 shared_memory.destroy_obj(shape_offset) 5481 shared_memory.destroy_obj(pickled_data_type_offset) 5482 shared_memory.free(offset)
5484 def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview: 5485 if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 5486 raise WrongObjectTypeError 5487 5488 data_buffer_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.data_buffer_offset) 5489 return shared_memory.get_obj_buffer(data_buffer_offset)
5491 def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]: 5492 if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 5493 raise WrongObjectTypeError 5494 5495 5496 data_buffer_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TNumpyNdarrayOffsets.data_buffer_offset) 5497 return shared_memory.get_obj_buffer_2(data_buffer_offset)
An enumeration.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
class TTorchTensor:
    """Codec that stores a torch ``Tensor`` in shared memory.

    The tensor is persisted as a single nested shared-memory numpy ndarray;
    the header holds one uint64 slot with that ndarray's offset
    (``TTorchTensorOffsets.numpy_ndarray_offset``).
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', tensor: Tensor) -> Tuple[Tensor, Offset, Size]:
        """Copy *tensor* into shared memory via its numpy representation.

        Returns ``(mapped_tensor, header_offset, real_size)`` where
        ``mapped_tensor`` is built with ``from_numpy`` over the shared ndarray.
        On failure, frees the header and any nested object, then re-raises.
        """
        offset, real_size = shared_memory.malloc(ObjectType.ttorchtensor, bs * len(TTorchTensorOffsets))
        created_items_offsets: List[Offset] = list()
        try:
            numpy_ndarray_mapped_obj, numpy_ndarray_offset, numpy_ndarray_size = shared_memory.put_obj(tensor.numpy())
            created_items_offsets.append(numpy_ndarray_offset)
            write_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TTorchTensorOffsets.numpy_ndarray_offset, numpy_ndarray_offset)
            mapped_torch_tensor: Tensor = from_numpy(numpy_ndarray_mapped_obj)
        except:
            # BUGFIX: removed stray `self._offset = None` — this codec is stateless
            # (no `_offset` attribute exists anywhere on it); the sibling
            # TNumpyNdarray codec's rollback does not set it either.
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)

            raise
        return mapped_torch_tensor, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Tensor:
        """Rebuild a tensor over an already-stored shared ndarray.

        (Return annotation fixed: was ``dict``.)
        """
        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TTorchTensorOffsets.numpy_ndarray_offset)
        numpy_ndarray_mapped_obj: np.ndarray = shared_memory.get_obj(numpy_ndarray_offset)
        mapped_torch_tensor: Tensor = from_numpy(numpy_ndarray_mapped_obj)
        return mapped_torch_tensor

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Destroy the nested ndarray object, then free the header block."""
        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TTorchTensorOffsets.numpy_ndarray_offset)
        shared_memory.destroy_obj(numpy_ndarray_offset)
        shared_memory.free(offset)

    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
        """Return a memoryview over the nested ndarray object's buffer.

        NOTE(review): type tag read at ``offset + bs * BaseObjOffsets.obj_type``
        while init/destroy use bare ``offset`` — equivalent only if obj_type == 0.
        """
        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TTorchTensorOffsets.numpy_ndarray_offset)
        return shared_memory.get_obj_buffer(numpy_ndarray_offset)

    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
        """Return ``(offset, size)`` of the nested ndarray object's buffer."""
        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type):
            raise WrongObjectTypeError

        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TTorchTensorOffsets.numpy_ndarray_offset)
        return shared_memory.get_obj_buffer_2(numpy_ndarray_offset)
5535 def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None: 5536 if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset): 5537 raise WrongObjectTypeError 5538 5539 numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TTorchTensorOffsets.numpy_ndarray_offset) 5540 shared_memory.destroy_obj(numpy_ndarray_offset) 5541 shared_memory.free(offset)
5543 def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview: 5544 if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 5545 raise WrongObjectTypeError 5546 5547 numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TTorchTensorOffsets.numpy_ndarray_offset) 5548 return shared_memory.get_obj_buffer(numpy_ndarray_offset)
5550 def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]: 5551 if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset + bs * BaseObjOffsets.obj_type): 5552 raise WrongObjectTypeError 5553 5554 5555 numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + bs * len(BaseObjOffsets) + bs * TTorchTensorOffsets.numpy_ndarray_offset) 5556 return shared_memory.get_obj_buffer_2(numpy_ndarray_offset)
5603class MessageOffsets(IntEnum): 5604 previous_message_offset = 0 5605 next_message_offset = 1 5606 item_offset = 2
Slot-index enumeration for a queued message's header: offsets of the previous message, the next message, and the carried item.
Inherited Members
- enum.Enum
- name
- value
- builtins.int
- conjugate
- bit_length
- to_bytes
- from_bytes
- as_integer_ratio
- real
- imag
- numerator
- denominator
6831class GetInLine: 6832 def __init__(self, shared_memory: SharedMemory): 6833 self.shared_memory: SharedMemory = shared_memory 6834 6835 def __enter__(self): 6836 self.shared_memory.get_in_line() 6837 return 6838 6839 def __exit__(self, exc_type, exc_value, traceback): 6840 self.shared_memory.release()
6855class WaitMyTurn: 6856 def __init__(self, shared_memory: SharedMemory, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001): 6857 self.shared_memory: SharedMemory = shared_memory 6858 self.time_limit: Optional[RationalNumber] = time_limit 6859 self.periodic_sleep_time: Optional[RationalNumber] = periodic_sleep_time 6860 6861 def __enter__(self): 6862 self.shared_memory.wait_my_turn(self.time_limit, self.periodic_sleep_time) 6863 return 6864 6865 def __exit__(self, exc_type, exc_value, traceback): 6866 self.shared_memory.release()
6856 def __init__(self, shared_memory: SharedMemory, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001): 6857 self.shared_memory: SharedMemory = shared_memory 6858 self.time_limit: Optional[RationalNumber] = time_limit 6859 self.periodic_sleep_time: Optional[RationalNumber] = periodic_sleep_time
6888class WaitMyTurnWhenHasMessages: 6889 def __init__(self, shared_memory: SharedMemory, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001): 6890 self.shared_memory: SharedMemory = shared_memory 6891 self.time_limit: Optional[RationalNumber] = time_limit 6892 self.periodic_sleep_time: Optional[RationalNumber] = periodic_sleep_time 6893 6894 def __enter__(self): 6895 while True: 6896 if not self.shared_memory.wait_my_turn(self.time_limit, self.periodic_sleep_time): 6897 raise OperationTimedOutError 6898 6899 if self.shared_memory.has_messages(): 6900 return 6901 else: 6902 self.shared_memory.release() 6903 6904 def __exit__(self, exc_type, exc_value, traceback): 6905 self.shared_memory.release()
6889 def __init__(self, shared_memory: SharedMemory, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001): 6890 self.shared_memory: SharedMemory = shared_memory 6891 self.time_limit: Optional[RationalNumber] = time_limit 6892 self.periodic_sleep_time: Optional[RationalNumber] = periodic_sleep_time
6911class await_my_turn: 6912 def __init__(self, shared_memory: SharedMemory, time_limit: Optional[RationalNumber] = None): 6913 self.shared_memory: SharedMemory = shared_memory 6914 self.time_limit: Optional[RationalNumber] = time_limit 6915 6916 async def __aenter__(self): 6917 await self.shared_memory.await_my_turn(self.time_limit) 6918 6919 async def __aexit__(self, exc_type, exc_val, exc_tb): 6920 self.shared_memory.release()
6939def make_numpy_array_from_obj_offset(shared_memory: SharedMemory, offset: Offset, np_shape, np_dtype_or_ctypes_type = None) -> Any: 6940 if np_dtype_or_ctypes_type is None: 6941 np_dtype_or_ctypes_type = ctypes.c_uint8 6942 6943 data_offset, data_size = shared_memory.get_obj_buffer_2(offset) 6944 if isinstance(np_dtype_or_ctypes_type, _SimpleCData): 6945 num_elements = np.prod(np_shape) 6946 np_array_size = num_elements * ctypes.sizeof(np_dtype_or_ctypes_type) 6947 if data_size < np_array_size: 6948 raise ObjBufferIsSmallerThanRequestedNumpyArrayError(data_size, np_array_size) 6949 6950 data_address = shared_memory.base_address + data_offset 6951 void_ptr = ctypes.c_void_p(data_address) 6952 # actual_ptr = ctypes.cast(void_ptr, ctypes.POINTER(np_dtype_or_ctypes_type * num_elements)) 6953 actual_ptr = ctypes.cast(void_ptr, ctypes.POINTER(np_dtype_or_ctypes_type)) 6954 return np.ctypeslib.as_array(actual_ptr, shape=np_shape) 6955 else: 6956 return np.ndarray(np_shape, dtype=np_dtype_or_ctypes_type, buffer=shared_memory.mem_view(data_offset, data_size))
6980def intenum_dict_to_list(mapping: AbsMapping, int_enum_class: Optional[Type] = None) -> List: 6981 if int_enum_class: 6982 items_num = len(int_enum_class) 6983 else: 6984 first_key_type_detected: bool = False 6985 for first_key in mapping.keys(): 6986 first_key_type = type(first_key) 6987 if issubclass(first_key_type, IntEnum): 6988 items_num = len(first_key_type) 6989 first_key_type_detected = True 6990 6991 if not first_key_type_detected: 6992 items_num = max(mapping.keys(), key=lambda value: int(value)) 6993 6994 result = [None] * items_num 6995 for key, value in mapping.items(): 6996 result[int(key)] = value 6997 6998 return result